[ 550.276095] env[69227]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69227) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 550.276456] env[69227]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69227) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 550.276531] env[69227]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69227) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 550.276872] env[69227]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 550.373202] env[69227]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69227) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 550.383071] env[69227]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69227) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 550.986274] env[69227]: INFO nova.virt.driver [None req-924b9570-7123-469b-861f-c22dcb7d8dc4 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 551.056957] env[69227]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 551.057191] env[69227]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 551.057327] env[69227]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69227) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 554.291022] env[69227]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-eb10ab1c-49f4-488b-a4b0-d2c4c7907700 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.307086] env[69227]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69227) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 554.307207] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-d6364ea5-f1c1-482e-a11f-1226dff5b33e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.342085] env[69227]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 7089d.
[ 554.342221] env[69227]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.285s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 554.342805] env[69227]: INFO nova.virt.vmwareapi.driver [None req-924b9570-7123-469b-861f-c22dcb7d8dc4 None None] VMware vCenter version: 7.0.3
[ 554.346233] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bc53cb-0822-4a44-ad31-f025e5447cdd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.363484] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddb04fb-ab33-45bb-bfbc-965ca9f3cad7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.369493] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d652cb19-eadd-4e0a-b78a-8ac71fa1f3a1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.376065] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b600f856-8889-47bc-a892-26cb60029144 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.389145] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7203242d-6247-4ea6-9638-513c2a175149 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.395297] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a596a20-f60a-4597-9a39-7fdd5e082656 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.425979] env[69227]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-5296a51f-5036-4569-be94-9ca787a51af6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 554.431618] env[69227]: DEBUG nova.virt.vmwareapi.driver [None req-924b9570-7123-469b-861f-c22dcb7d8dc4 None None] Extension org.openstack.compute already exists. {{(pid=69227) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 554.434345] env[69227]: INFO nova.compute.provider_config [None req-924b9570-7123-469b-861f-c22dcb7d8dc4 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 554.937933] env[69227]: DEBUG nova.context [None req-924b9570-7123-469b-861f-c22dcb7d8dc4 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),f93ba009-d8b4-4c9f-b4e5-72ddebc02a14(cell1) {{(pid=69227) load_cells /opt/stack/nova/nova/context.py:464}}
[ 554.940100] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 554.940336] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 554.941097] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 554.941596] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Acquiring lock "f93ba009-d8b4-4c9f-b4e5-72ddebc02a14" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 554.941750] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Lock "f93ba009-d8b4-4c9f-b4e5-72ddebc02a14" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 554.942834] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Lock "f93ba009-d8b4-4c9f-b4e5-72ddebc02a14" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 554.963115] env[69227]: INFO dbcounter [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Registered counter for database nova_cell0
[ 554.971224] env[69227]: INFO dbcounter [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Registered counter for database nova_cell1
[ 554.974625] env[69227]: DEBUG oslo_db.sqlalchemy.engines [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69227) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 554.974988] env[69227]: DEBUG oslo_db.sqlalchemy.engines [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69227) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 554.980141] env[69227]: ERROR nova.db.main.api [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 554.980141] env[69227]: result = function(*args, **kwargs)
[ 554.980141] env[69227]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 554.980141] env[69227]: return func(*args, **kwargs)
[ 554.980141] env[69227]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 554.980141] env[69227]: result = fn(*args, **kwargs)
[ 554.980141] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 554.980141] env[69227]: return f(*args, **kwargs)
[ 554.980141] env[69227]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 554.980141] env[69227]: return db.service_get_minimum_version(context, binaries)
[ 554.980141] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 554.980141] env[69227]: _check_db_access()
[ 554.980141] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 554.980141] env[69227]: stacktrace = ''.join(traceback.format_stack())
[ 554.980141] env[69227]:
[ 554.981267] env[69227]: ERROR nova.db.main.api [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 554.981267] env[69227]: result = function(*args, **kwargs)
[ 554.981267] env[69227]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 554.981267] env[69227]: return func(*args, **kwargs)
[ 554.981267] env[69227]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 554.981267] env[69227]: result = fn(*args, **kwargs)
[ 554.981267] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 554.981267] env[69227]: return f(*args, **kwargs)
[ 554.981267] env[69227]: File "/opt/stack/nova/nova/objects/service.py", line 553, in _db_service_get_minimum_version
[ 554.981267] env[69227]: return db.service_get_minimum_version(context, binaries)
[ 554.981267] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 554.981267] env[69227]: _check_db_access()
[ 554.981267] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 554.981267] env[69227]: stacktrace = ''.join(traceback.format_stack())
[ 554.981267] env[69227]:
[ 554.981863] env[69227]: WARNING nova.objects.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 554.981863] env[69227]: WARNING nova.objects.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Failed to get minimum service version for cell f93ba009-d8b4-4c9f-b4e5-72ddebc02a14
[ 554.982231] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Acquiring lock "singleton_lock" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 554.982394] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Acquired lock "singleton_lock" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
554.982636] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Releasing lock "singleton_lock" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 554.983035] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Full set of CONF: {{(pid=69227) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 554.983136] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ******************************************************************************** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 554.983244] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Configuration options gathered from: {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 554.983380] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 554.983568] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 554.983699] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ================================================================================ {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 554.983903] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] allow_resize_to_same_host = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.984083] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] arq_binding_timeout = 300 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.984217] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] backdoor_port = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.984343] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] backdoor_socket = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.984507] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] block_device_allocate_retries = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.984666] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] block_device_allocate_retries_interval = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.984833] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cert = self.pem {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.985018] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.985181] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute_monitors = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.985348] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] config_dir = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.985517] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] config_drive_format = iso9660 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.985652] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.985842] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] config_source = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.986029] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] console_host = devstack {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.986200] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] control_exchange = nova {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.986361] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cpu_allocation_ratio = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.986521] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] daemon = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.986701] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] debug = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.986925] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] default_access_ip_network_name = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.987133] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] default_availability_zone = nova {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.987299] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] default_ephemeral_format = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.987462] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] default_green_pool_size = 1000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.987711] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.987880] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] default_schedule_zone = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.988074] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] disk_allocation_ratio = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.988255] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] enable_new_services = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.988436] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] enabled_apis = ['osapi_compute'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.988600] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] enabled_ssl_apis = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.988759] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] flat_injected = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.988917] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] force_config_drive = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.989089] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] force_raw_images = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.989263] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] graceful_shutdown_timeout = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.989422] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] heal_instance_info_cache_interval = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.989644] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] host = cpu-1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.989822] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.989986] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] initial_disk_allocation_ratio = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.990165] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] initial_ram_allocation_ratio = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.990388] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.990555] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_build_timeout = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.990717] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_delete_interval = 300 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.990886] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_format = [instance: %(uuid)s] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.991065] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_name_template = instance-%08x {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.991234] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_usage_audit = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.991403] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_usage_audit_period = month {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.991568] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.991733] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] instances_path = /opt/stack/data/nova/instances {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.991901] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] internal_service_availability_zone = internal {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.992065] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] key = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.992227] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] live_migration_retry_count = 30 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.992392] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_color = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.992556] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_config_append = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.992723] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.992883] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_dir = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993050] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993185] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_options = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993350] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_rotate_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993520] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_rotate_interval_type = days {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993694] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] log_rotation_type = none {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993826] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.993955] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.994139] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.994308] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.994437] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.994601] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] long_rpc_timeout = 1800 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.994760] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] max_concurrent_builds = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.994921] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] max_concurrent_live_migrations = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.995088] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] max_concurrent_snapshots = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.995250] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] max_local_block_devices = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.995407] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] max_logfile_count = 30 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.995563] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] max_logfile_size_mb = 200 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.995747] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] maximum_instance_delete_attempts = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.995927] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metadata_listen = 0.0.0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.996115] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metadata_listen_port = 8775 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.996289] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metadata_workers = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.996453] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] migrate_max_retries = -1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.996623] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] mkisofs_cmd = genisoimage {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.996839] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] my_block_storage_ip = 10.180.1.21 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.996984] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] my_ip = 10.180.1.21 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.997163] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] network_allocate_retries = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.997345] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.997512] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] osapi_compute_listen = 0.0.0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.997671] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] osapi_compute_listen_port = 8774 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.997835] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] osapi_compute_unique_server_name_scope = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.998015] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] osapi_compute_workers = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.998204] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] password_length = 12 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.998367] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] periodic_enable = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.998526] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] periodic_fuzzy_delay = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.998694] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] pointer_model = usbtablet {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.998861] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] preallocate_images = none {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999030] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] publish_errors = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999166] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] pybasedir = /opt/stack/nova {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999324] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ram_allocation_ratio = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999485] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] rate_limit_burst = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999653] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] rate_limit_except_level = CRITICAL {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999811] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] rate_limit_interval = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 554.999970] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reboot_timeout = 0 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.000142] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reclaim_instance_interval = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.000299] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] record = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.000464] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reimage_timeout_per_gb = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.000628] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] report_interval = 120 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.000786] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] rescue_timeout = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.000944] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reserved_host_cpus = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.001114] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reserved_host_disk_mb = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.001273] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reserved_host_memory_mb = 512 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.001435] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] reserved_huge_pages = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.001596] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] resize_confirm_window = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.001758] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] resize_fs_using_block_device = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.001918] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] resume_guests_state_on_host_boot = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.002099] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.002266] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] rpc_response_timeout = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.002428] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] run_external_periodic_tasks = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.002596] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] running_deleted_instance_action = reap {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.002756] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] running_deleted_instance_poll_interval = 1800 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.002917] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] running_deleted_instance_timeout = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.003092] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler_instance_sync_interval = 120 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.003259] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_down_time = 720 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.003424] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] servicegroup_driver = db {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.003580] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] shell_completion = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.003738] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] shelved_offload_time = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.003898] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] shelved_poll_interval = 3600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.004074] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] shutdown_timeout = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.004235] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] source_is_ipv6 = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.004392] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ssl_only = False {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.004638] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.005078] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] sync_power_state_interval = 600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.005265] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] sync_power_state_pool_size = 1000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.005444] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] syslog_log_facility = LOG_USER {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.005604] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] tempdir = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.005795] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] timeout_nbd = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.005978] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] transport_url = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.006159] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] update_resources_interval = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.006325] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] use_cow_images = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.006490] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] use_journal = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.006650] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] use_json = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.006811] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] use_rootwrap_daemon = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.006971] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] use_stderr = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.007170] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None 
None] use_syslog = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.007341] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vcpu_pin_set = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.007513] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plugging_is_fatal = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.007684] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plugging_timeout = 300 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.007853] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] virt_mkfs = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.008037] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] volume_usage_poll_interval = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.008213] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] watch_log_file = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.008383] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] web = /usr/share/spice-html5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 555.008564] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_concurrency.disable_process_locking = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.008851] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.009047] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.009221] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.009392] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.009583] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock 
{{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.009808] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.010016] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.auth_strategy = keystone {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.010193] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.compute_link_prefix = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.010374] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.010548] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.dhcp_domain = novalocal {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.010719] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.enable_instance_password = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.010883] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.glance_link_prefix = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.011058] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.011241] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.011406] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.instance_list_per_project_cells = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.011567] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.list_records_by_skipping_down_cells = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.011727] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.local_metadata_per_cell = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.011896] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.max_limit = 1000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.012075] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.metadata_cache_expiration = 15 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.012253] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.neutron_default_tenant_id = default {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.012417] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.use_neutron_default_nets = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.012599] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.012782] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.012956] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.013147] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.013322] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_dynamic_targets = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.013486] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_jsonfile_path = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.013669] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.013864] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.backend = dogpile.cache.memcached {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.014044] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.backend_argument = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.014210] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 
None None] cache.backend_expiration_time = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.014381] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.config_prefix = cache.oslo {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.014551] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.dead_timeout = 60.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.014714] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.debug_cache_backend = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.014877] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.enable_retry_client = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.015059] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.enable_socket_keepalive = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.015234] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.enabled = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.015401] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.enforce_fips_mode = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.015564] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.expiration_time = 600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.015779] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.hashclient_retry_attempts = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.015963] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.hashclient_retry_delay = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.016178] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_dead_retry = 300 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.016351] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_password = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.016517] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.016682] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.016870] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_pool_maxsize = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.017058] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.017228] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_sasl_enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.017409] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.017576] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_socket_timeout = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.017736] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.memcache_username = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.017902] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.proxies = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.018078] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_db = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.018240] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_password = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.018412] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_sentinel_service_name = mymaster {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.018590] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.018778] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_server = localhost:6379 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.018951] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_socket_timeout = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.019131] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.redis_username = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.019301] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.retry_attempts = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.019470] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.retry_delay = 0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.019635] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.socket_keepalive_count = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.019800] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.socket_keepalive_idle = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.019965] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.socket_keepalive_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.020138] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.tls_allowed_ciphers = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.020298] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.tls_cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.020456] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.tls_certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.020620] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.tls_enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.020777] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cache.tls_keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.020948] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.021139] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.auth_type = password {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.021305] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.021481] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.catalog_info = volumev3::publicURL {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.021643] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.021809] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.021982] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.cross_az_attach = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.022155] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.debug = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.022318] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.endpoint_template = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.022484] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.http_retries = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.022646] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.022805] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.022980] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.os_region_name = RegionOne {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.023159] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.023323] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cinder.timeout = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.023494] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.023655] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.cpu_dedicated_set = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.023814] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.cpu_shared_set = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.023981] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.image_type_exclude_list = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.024157] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.024322] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.max_concurrent_disk_ops = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.024485] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.max_disk_devices_to_attach = -1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.024648] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.024819] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.024982] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.resource_provider_association_refresh = 300 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.025160] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.shutdown_retry_interval = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.025342] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.025520] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None 
None] conductor.workers = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.025715] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] console.allowed_origins = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.025884] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] console.ssl_ciphers = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.026072] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] console.ssl_minimum_version = default {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.026247] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] consoleauth.enforce_session_timeout = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.026417] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] consoleauth.token_ttl = 600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.026586] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.026744] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.026932] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.027111] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.027277] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.connect_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.027437] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.027600] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.027756] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
555.027914] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.028081] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.028241] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.028397] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.028552] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.028723] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.service_type = accelerator {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.028886] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.029056] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.029219] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.status_code_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.029376] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.029559] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.029722] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] cyborg.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.029892] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.asyncio_connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.030063] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None 
None] database.asyncio_slave_connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.030240] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.backend = sqlalchemy {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.030410] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.030576] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.connection_debug = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.030748] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.connection_parameters = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.030915] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.connection_recycle_time = 3600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.031091] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.connection_trace = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.031255] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.db_inc_retry_interval = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.031417] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.db_max_retries = 20 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.031578] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.db_max_retry_interval = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.031739] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.db_retry_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.031901] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.max_overflow = 50 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.032071] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.max_pool_size = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.032235] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.max_retries = 10 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.032403] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.032561] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.mysql_wsrep_sync_wait = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.032717] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.pool_timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.032880] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.retry_interval = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.033047] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.slave_connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.033212] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.sqlite_synchronous = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.033369] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] database.use_db_reconnect = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.033533] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.asyncio_connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.033699] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.asyncio_slave_connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.033871] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.backend = sqlalchemy {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.034050] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.034217] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.connection_debug = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.034388] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.connection_parameters = {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.034551] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.connection_recycle_time = 3600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.034710] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.connection_trace = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.034872] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.db_inc_retry_interval = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.035042] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.db_max_retries = 20 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.035209] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.db_max_retry_interval = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.035372] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.db_retry_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.035531] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.max_overflow = 50 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.035711] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.max_pool_size = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.035884] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.max_retries = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.036070] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.036236] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.036395] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.pool_timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.036556] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.retry_interval = 10 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.036716] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.slave_connection = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.036889] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] api_database.sqlite_synchronous = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.037084] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] devices.enabled_mdev_types = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.037270] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.037442] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ephemeral_storage_encryption.default_format = luks {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.037608] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ephemeral_storage_encryption.enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.037771] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.037945] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.api_servers = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.038144] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.038317] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.038483] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.038641] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.038801] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.connect_retry_delay = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.038963] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.debug = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.039145] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.default_trusted_certificate_ids = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.039307] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.enable_certificate_validation = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.039469] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.enable_rbd_download = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.039627] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.039792] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.039955] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.040125] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.040282] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.040442] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.num_retries = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.040611] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.rbd_ceph_conf = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.040774] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.rbd_connect_timeout = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.040943] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.rbd_pool = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.041157] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.rbd_user = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.041343] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.041506] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.041664] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.041832] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.service_type = image {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.041998] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.042172] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.042340] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.status_code_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.042496] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.042675] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.042840] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.verify_glance_signatures = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.042999] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] glance.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.043178] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] guestfs.debug = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.043344] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] mks.enabled = False {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.043695] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.043889] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] image_cache.manager_interval = 2400 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.044072] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] image_cache.precache_concurrency = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.044249] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] image_cache.remove_unused_base_images = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.044419] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.044587] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.044763] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] image_cache.subdirectory_name = _base {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.044942] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.api_max_retries = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.045123] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.api_retry_interval = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.045289] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.045452] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.auth_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.045613] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.045796] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.certfile = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.045971] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.046152] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.conductor_group = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.046314] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.046475] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.connect_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.046633] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.046799] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.046957] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.047128] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.047287] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.047454] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.peer_list = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.047613] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.047769] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.047931] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.serial_console_state_timeout = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.048126] env[69227]: DEBUG oslo_service.backend.eventlet.service 
[None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.048312] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.service_type = baremetal {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.048474] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.shard = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.048635] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.048812] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.049050] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.status_code_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.049235] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.049424] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.049588] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ironic.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.049771] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.049948] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] key_manager.fixed_key = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.050147] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.050312] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.barbican_api_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.050472] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] 
barbican.barbican_endpoint = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.050640] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.barbican_endpoint_type = public {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.050799] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.barbican_region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.050958] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.051129] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.051292] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.051452] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.051606] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.051768] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.number_of_retries = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.051929] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.retry_delay = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.052109] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.send_service_user_token = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.052275] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.052434] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.052653] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.verify_ssl = True {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.052827] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican.verify_ssl_path = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.053009] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.053180] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.auth_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.053340] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.053497] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.053660] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.053858] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.054035] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.054205] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.054362] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] barbican_service_user.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.054528] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.approle_role_id = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.054687] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.approle_secret_id = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.054860] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.kv_mountpoint = secret {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.055030] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.kv_path = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.055209] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.kv_version = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.055373] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.namespace = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.055535] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.root_token_id = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.055723] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.ssl_ca_crt_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.055904] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.timeout = 60.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.056089] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.use_ssl = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.056266] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.056441] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.056606] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.auth_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.056768] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.056929] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.057108] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.057272] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.057432] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.connect_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.057589] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.057750] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.057908] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.058076] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.058237] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.058392] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.058551] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.058707] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.058880] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.service_type = identity {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.059046] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.059209] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.059370] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.status_code_retry_delay = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.059527] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.059712] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.059870] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] keystone.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.060079] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.connection_uri = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.060244] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_mode = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.060411] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_model_extra_flags = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.060580] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_models = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.060749] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_power_governor_high = performance {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.060918] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_power_governor_low = powersave {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.061091] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_power_management = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.061268] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.061434] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.device_detach_attempts = 8 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.061595] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.device_detach_timeout = 20 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.061760] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.disk_cachemodes = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.061919] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.disk_prefix = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.062096] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.enabled_perf_events = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.062263] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.file_backed_memory = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.062431] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.gid_maps = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.062591] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.hw_disk_discard = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.062750] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.hw_machine_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.062923] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_rbd_ceph_conf = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.063101] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.063272] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.063442] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_rbd_glance_store_name = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.063612] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_rbd_pool = rbd {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.063805] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_type = default {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.063976] 
env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.images_volume_group = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.064154] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.inject_key = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.064320] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.inject_partition = -2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.064481] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.inject_password = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.064642] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.iscsi_iface = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.064804] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.iser_use_multipath = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.064968] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_bandwidth = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.065142] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.065307] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_downtime = 500 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.065468] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.065633] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.065823] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_inbound_addr = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.065994] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.066170] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_permit_post_copy = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.066335] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_scheme = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.066512] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_timeout_action = abort {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.066679] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_tunnelled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.066839] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_uri = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.067008] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.live_migration_with_native_tls = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.067176] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.max_queues = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.067339] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.067565] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.067730] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.nfs_mount_options = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.068079] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.068275] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.068446] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.num_iser_scan_tries = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.068611] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.num_memory_encrypted_guests = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.068775] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.068942] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.num_pcie_ports = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.069124] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.num_volume_scan_tries = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.069294] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.pmem_namespaces = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.069454] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.quobyte_client_cfg = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.069758] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.069937] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rbd_connect_timeout = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.070120] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.070292] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.070455] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rbd_secret_uuid = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.070614] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rbd_user = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.070780] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.070956] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.remote_filesystem_transport = ssh {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.071132] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rescue_image_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.071295] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rescue_kernel_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.071453] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rescue_ramdisk_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.071621] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.071782] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.rx_queue_size = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.071952] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.smbfs_mount_options = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.072257] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.072443] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.snapshot_compression = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.072611] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.snapshot_image_format = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.072834] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.073013] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.sparse_logical_volumes = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.073185] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.swtpm_enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.073357] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.swtpm_group = tss {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.073528] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.swtpm_user = tss {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.073710] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.sysinfo_serial = unique {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.073895] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.tb_cache_size = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.074072] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.tx_queue_size = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.074245] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.uid_maps = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.074409] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.use_virtio_for_bridges = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.074581] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.virt_type = kvm {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.074750] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.volume_clear = zero {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.074917] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.volume_clear_size = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.075095] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.volume_use_multipath = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.075259] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_cache_path = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.075430] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.075599] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_mount_group = qemu 
{{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.075788] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_mount_opts = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.075968] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.076277] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.076461] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.vzstorage_mount_user = stack {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.076633] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.076810] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.076989] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.auth_type = password {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.077168] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.077332] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.077499] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.077661] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.077824] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.connect_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.078087] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.default_floating_pool = public {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.078287] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.078456] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.extension_sync_interval = 600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.078621] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.http_retries = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.078785] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.078948] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.079124] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.079299] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.079460] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.079628] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.ovs_bridge = br-int {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.079795] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.physnets = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.079967] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.region_name = RegionOne {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.080143] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.080316] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.service_metadata_proxy = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.080477] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.080646] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.service_type = network {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.080809] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.080973] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.081147] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.status_code_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.081308] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.081495] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.081655] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] neutron.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.081828] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] notifications.bdms_in_notifications = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.082016] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] notifications.default_level = INFO {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.082204] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] notifications.notification_format = unversioned {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.082369] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] notifications.notify_on_state_change = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.082546] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.082722] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] pci.alias = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.082892] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] pci.device_spec = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.083068] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] pci.report_in_placement = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.083244] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.083417] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.auth_type = password {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.083586] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.083770] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.083950] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.084130] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.084294] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.084451] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.connect_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.084610] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.default_domain_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.084770] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.default_domain_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.084930] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.domain_id = None 
{{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.085101] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.domain_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.085265] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.085427] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.085584] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.085764] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.085940] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.086124] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.password = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.086289] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.project_domain_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.086460] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.project_domain_name = Default {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.086628] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.project_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.086805] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.project_name = service {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.086977] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.region_name = RegionOne {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.087155] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
555.087317] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.087490] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.service_type = placement {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.087656] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.087818] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.087983] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.status_code_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.088182] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.system_scope = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.088346] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.088505] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.trust_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.088662] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.user_domain_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.088830] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.user_domain_name = Default {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.088992] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.user_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.089178] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.username = nova {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.089361] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.089523] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] placement.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.089702] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.cores = 20 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.089870] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.count_usage_from_placement = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.090083] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.090278] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.injected_file_content_bytes = 10240 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.090451] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.injected_file_path_length = 255 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.090617] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.injected_files = 5 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.090785] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.instances = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.090954] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.key_pairs = 100 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.091135] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.metadata_items = 128 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.091306] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.ram = 51200 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.091470] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.recheck_quota = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.091636] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.server_group_members = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.091803] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] quota.server_groups = 10 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.091978] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.092155] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.092318] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.image_metadata_prefilter = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.092478] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.092640] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.max_attempts = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.092807] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.max_placement_results = 1000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.092971] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.093148] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.query_placement_for_image_type_support = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.093313] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.093486] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] scheduler.workers = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.093658] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.093873] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.094077] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.094253] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.094424] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.094588] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.094753] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.094945] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.095128] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.host_subset_size = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.095295] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.095454] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.095617] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.095818] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.isolated_hosts = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.095991] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] 
filter_scheduler.isolated_images = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.096175] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.096338] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.096502] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.096665] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.pci_in_placement = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.096829] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.097006] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.097192] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.097353] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.097516] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.097679] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.097842] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.track_instance_changes = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.098049] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.098271] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metrics.required = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.098448] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metrics.weight_multiplier = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.098617] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.098787] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] metrics.weight_setting = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.099191] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.099385] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] serial_console.enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.099569] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] serial_console.port_range = 10000:20000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.099746] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.099920] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.100105] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] serial_console.serialproxy_port = 6083 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.100281] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.100458] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.auth_type = password {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.100622] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.cafile = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.100781] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.100946] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.101124] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.101287] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.101460] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.send_service_user_token = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.101624] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.101785] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] service_user.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.101991] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.agent_enabled = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.102226] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.102543] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.102761] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.102937] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.html5proxy_port = 6082 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.103115] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.image_compression = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
555.103283] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.jpeg_compression = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.103443] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.playback_compression = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.103615] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.server_listen = 127.0.0.1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.103810] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.103981] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.streaming_mode = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.104157] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] spice.zlib_compression = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.104325] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] upgrade_levels.baseapi = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.104499] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] upgrade_levels.compute = auto {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.104661] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] upgrade_levels.conductor = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.104821] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] upgrade_levels.scheduler = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.104986] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.105164] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.auth_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.105323] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.105480] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.105643] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.105835] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.106009] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.106186] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.106347] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vendordata_dynamic_auth.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.106522] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.api_retry_count = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.106685] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.ca_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.106860] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.cache_prefix = devstack-image-cache {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.107040] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.cluster_name = testcl1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.107213] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.connection_pool_size = 10 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.107376] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.console_delay_seconds = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.107546] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.datastore_regex = ^datastore.* {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.107761] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 
None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.107950] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.host_password = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.108129] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.host_port = 443 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.108304] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.host_username = administrator@vsphere.local {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.108474] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.insecure = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.108637] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.integration_bridge = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.108802] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.maximum_objects = 100 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.108965] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.pbm_default_policy = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.109139] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.pbm_enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.109300] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.pbm_wsdl_location = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.109469] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.109629] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.serial_port_proxy_uri = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.109788] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.serial_port_service_uri = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.109956] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.task_poll_interval = 0.5 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.110141] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.use_linked_clone = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.110313] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.vnc_keymap = en-us {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.110477] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.vnc_port = 5900 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.110640] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vmware.vnc_port_total = 10000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.110826] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.auth_schemes = ['none'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.111013] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.111314] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.111502] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.111673] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.novncproxy_port = 6080 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.111872] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.server_listen = 127.0.0.1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.112070] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.112240] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.vencrypt_ca_certs = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.112403] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.vencrypt_client_cert = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.112565] env[69227]: DEBUG 
oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vnc.vencrypt_client_key = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.112745] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.112910] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.113083] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.113247] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.113406] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.disable_rootwrap = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.113566] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.enable_numa_live_migration = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.113758] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.113949] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.114128] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.114291] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.libvirt_disable_apic = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.114451] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.114612] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.114773] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.114937] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.115108] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.115271] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.115429] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.115588] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.115769] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.115973] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.116185] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.116367] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.client_socket_timeout = 900 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.116623] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.default_pool_size = 1000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.116889] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.keep_alive = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.117161] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 
None None] wsgi.max_header_line = 16384 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.117352] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.secure_proxy_ssl_header = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.117517] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.ssl_ca_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.117681] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.ssl_cert_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.117845] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.ssl_key_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.118027] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.tcp_keepidle = 600 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.118222] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.118394] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] zvm.ca_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.118559] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] zvm.cloud_connector_url = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.118854] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.119045] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] zvm.reachable_timeout = 300 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.119235] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.enforce_new_defaults = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.119619] env[69227]: WARNING oslo_config.cfg [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). 
Its value may be silently ignored in the future. [ 555.119811] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.enforce_scope = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.119989] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.policy_default_rule = default {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.120185] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.120360] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.policy_file = policy.yaml {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.120536] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.120699] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.120862] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.121032] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.121203] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.121372] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_policy.remote_timeout = 60.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.121540] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.121717] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.121895] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.connection_string 
= messaging:// {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.122078] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.enabled = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.122253] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.es_doc_type = notification {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.122419] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.es_scroll_size = 10000 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.122588] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.es_scroll_time = 2m {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.122754] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.filter_error_trace = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.122923] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.hmac_keys = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.123105] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.sentinel_service_name = mymaster {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.123276] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.socket_timeout = 0.1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.123441] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.trace_requests = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.123604] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler.trace_sqlalchemy = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.123805] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler_jaeger.process_tags = {} {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.123975] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler_jaeger.service_name_prefix = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.124158] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] profiler_otlp.service_name_prefix = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.124326] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] remote_debug.host = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.124486] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] remote_debug.port = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.124661] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.124826] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.124991] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.125169] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.125331] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.125491] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.125651] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.125840] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.126014] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.126195] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.126356] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] 
oslo_messaging_rabbit.kombu_compression = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.126529] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.126716] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.126900] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.127078] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.127255] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.127420] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.127584] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.127755] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.127918] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.128092] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.128262] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.128426] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.128588] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.128751] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.128912] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.129083] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.129247] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.129408] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.129567] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.129726] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.ssl = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.129893] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.130070] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.130236] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.130405] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.130572] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None 
None] oslo_messaging_rabbit.ssl_version = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.130737] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.130922] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.131101] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_notifications.retry = -1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.131287] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.131462] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_messaging_notifications.transport_url = **** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.131643] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.auth_section = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.131813] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.auth_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.131972] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.cafile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.132144] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.certfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.132311] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.collect_timing = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.132470] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.connect_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.132628] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.connect_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.132786] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] 
oslo_limit.endpoint_id = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.132958] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.endpoint_interface = publicURL {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.133131] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.endpoint_override = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.133289] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.endpoint_region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.133446] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.endpoint_service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.133605] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.endpoint_service_type = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.133789] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.insecure = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.133992] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.keyfile = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.134187] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.max_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.134386] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.min_version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.134555] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.region_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.134718] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.retriable_status_codes = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.134880] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.service_name = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.135052] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.service_type = None {{(pid=69227) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.135222] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.split_loggers = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.135382] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.status_code_retries = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.135542] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.status_code_retry_delay = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.135715] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.timeout = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.135889] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.valid_interfaces = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.136061] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_limit.version = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.136230] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_reports.file_event_handler = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.136394] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.136554] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] oslo_reports.log_dir = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.136744] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.136918] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.137097] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.137270] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69227) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.137434] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.137592] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.137763] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.137925] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_ovs_privileged.group = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.138182] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.138282] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.138449] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.138597] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] vif_plug_ovs_privileged.user = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.138770] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.flat_interface = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.138950] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.139142] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.139316] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.139490] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.139662] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.139829] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.139995] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.140192] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.140374] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.isolate_vif = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.140542] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.140707] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.140878] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.141072] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.ovsdb_interface = native {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.141243] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_vif_ovs.per_port_bridge = False {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.141412] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_brick.lock_path = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.141579] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.141752] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.141926] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] privsep_osbrick.capabilities = [21] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.142100] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] privsep_osbrick.group = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.142261] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] privsep_osbrick.helper_command = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.142427] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.142591] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.142748] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] privsep_osbrick.user = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.142923] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.143092] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] nova_sys_admin.group = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.143253] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] nova_sys_admin.helper_command = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.143419] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.143581] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.143758] env[69227]: DEBUG oslo_service.backend.eventlet.service [None req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] nova_sys_admin.user = None {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 555.143906] env[69227]: DEBUG oslo_service.backend.eventlet.service [None 
req-fb3a6d81-aba6-434f-b3ca-066cd662aea8 None None] ******************************************************************************** {{(pid=69227) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 555.144419] env[69227]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 555.647755] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Getting list of instances from cluster (obj){ [ 555.647755] env[69227]: value = "domain-c8" [ 555.647755] env[69227]: _type = "ClusterComputeResource" [ 555.647755] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 555.648979] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2389e5e9-a8e7-48f9-8031-bfd1184a3bc1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.658283] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Got total of 0 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 555.658832] env[69227]: WARNING nova.virt.vmwareapi.driver [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 555.659335] env[69227]: INFO nova.virt.node [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Generated node identity 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b [ 555.659570] env[69227]: INFO nova.virt.node [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Wrote node identity 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b to /opt/stack/data/n-cpu-1/compute_id [ 556.161899] env[69227]: WARNING nova.compute.manager [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Compute nodes ['30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 557.167481] env[69227]: INFO nova.compute.manager [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 558.174732] env[69227]: WARNING nova.compute.manager [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
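The long run of `group.option = value` DEBUG lines above (ending at the row of asterisks) is the configuration dump that oslo.config emits via `log_opt_values()` when the service starts. A minimal sketch of how such a dump is produced follows; it is illustrative only and not nova's actual startup code, and the two registered options and the `demo` project name are assumptions chosen to mirror the `oslo_messaging_rabbit.*` lines in the log.

```python
# Sketch: reproduce an oslo.config "group.option = value" startup dump.
# Assumptions: option names and the 'demo' project are illustrative only.
import logging

from oslo_config import cfg

CONF = cfg.CONF
LOG = logging.getLogger(__name__)

# Register a couple of options under a group, the way nova and
# oslo.messaging register theirs.
opts = [
    cfg.BoolOpt('amqp_auto_delete', default=False),
    cfg.IntOpt('heartbeat_rate', default=3),
]
CONF.register_opts(opts, group='oslo_messaging_rabbit')

if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    CONF([], project='demo')  # parse (empty) CLI args and config files
    # Walks every registered option and logs one DEBUG line per value,
    # bracketed by banner lines of asterisks -- the same shape as the
    # dump captured above.
    CONF.log_opt_values(LOG, logging.DEBUG)
```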
[ 558.175074] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 558.175652] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 558.175850] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 558.176018] env[69227]: DEBUG nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 558.176937] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9159356-8f64-4d64-8f84-3fba86ab0732 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.185214] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f42a3c3-2d2d-41de-a080-33b19df7e3ce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.198779] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b621dca-8e3a-4b01-ae1f-b6cbbfdc6250 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.205129] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6aacf52-13d9-4b17-83e5-6aed765b93a7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.234230] env[69227]: DEBUG nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180972MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 558.234386] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 558.234567] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 558.739031] env[69227]: WARNING 
nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] No compute node record for cpu-1:30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b could not be found. [ 559.242985] env[69227]: INFO nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b [ 560.750284] env[69227]: DEBUG nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 560.751127] env[69227]: DEBUG nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 560.908241] env[69227]: INFO nova.scheduler.client.report [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] [req-1b3fcd6e-22d8-4b5b-8585-285bc1bce946] Created resource provider record via placement API for resource provider with UUID 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 560.925015] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0daf7c3-5d6a-4872-b758-f0c590660130 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.931738] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-437075c9-cc16-4b9e-89e8-78540b5ac7af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.964358] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2cf9f6-8e09-4d48-bae6-ba49407bc0a6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.972260] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea838ac4-278c-4213-89db-7e3d178057df {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.986837] env[69227]: DEBUG nova.compute.provider_tree [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 561.523971] env[69227]: DEBUG nova.scheduler.client.report [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Updated inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 561.523971] env[69227]: DEBUG nova.compute.provider_tree [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Updating resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b generation from 0 to 1 during operation: update_inventory {{(pid=69227) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 561.523971] env[69227]: DEBUG nova.compute.provider_tree [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 561.574041] env[69227]: DEBUG nova.compute.provider_tree [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Updating resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b generation from 1 to 2 during operation: update_traits {{(pid=69227) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 562.076291] env[69227]: DEBUG nova.compute.resource_tracker [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 562.076587] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.842s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 562.076671] env[69227]: DEBUG nova.service [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Creating RPC server for service compute {{(pid=69227) start /opt/stack/nova/nova/service.py:186}} [ 562.090256] env[69227]: DEBUG nova.service [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] Join ServiceGroup membership for this service compute {{(pid=69227) start /opt/stack/nova/nova/service.py:203}} [ 562.090446] env[69227]: DEBUG nova.servicegroup.drivers.db [None req-3b92f1c1-c30d-496c-a71e-df19c6c137ed None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69227) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 570.091941] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.595307] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Getting list of instances from cluster (obj){ [ 570.595307] env[69227]: value = 
"domain-c8" [ 570.595307] env[69227]: _type = "ClusterComputeResource" [ 570.595307] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 570.596522] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a09a17-bb03-4e39-b78b-d09b729a8208 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.605164] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Got total of 0 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 570.605393] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.605692] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Getting list of instances from cluster (obj){ [ 570.605692] env[69227]: value = "domain-c8" [ 570.605692] env[69227]: _type = "ClusterComputeResource" [ 570.605692] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 570.606549] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a53119-7626-480d-8fb2-fb1c1605e959 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.614042] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Got total of 0 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 591.279995] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquiring lock "16959790-5fdc-4304-b889-45bb6b015c3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.280382] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Lock "16959790-5fdc-4304-b889-45bb6b015c3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.784219] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 592.329875] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.329875] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.332219] env[69227]: INFO nova.compute.claims [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.383272] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6142f17-2aa5-46a5-b8e8-acad09362996 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.394267] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970f0b9a-7efd-4647-b5ef-a47deda72de6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.433209] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f778aa06-5fe8-48ea-99a6-d7d3b9117bb3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.440865] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c52df10-985f-4938-a6d4-0ee3fab1a77c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.454564] env[69227]: DEBUG nova.compute.provider_tree [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 593.965664] env[69227]: DEBUG nova.scheduler.client.report [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 594.474243] env[69227]: DEBUG 
oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.144s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 594.474849] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 594.981943] env[69227]: DEBUG nova.compute.utils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 594.983741] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 594.987957] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 595.494496] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 596.508277] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 597.240502] env[69227]: DEBUG nova.policy [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27795fceb2624b8593e780509ac2f566', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca9300e057e345c183dcf36fdc2d752f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 597.337659] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 597.337659] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 597.337659] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 597.341292] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 597.341763] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 597.342072] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 597.342719] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 597.344492] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 597.344597] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 597.344883] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 597.345344] env[69227]: DEBUG nova.virt.hardware [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.347146] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c74c6c-d314-42f0-944d-8f08ccc07b01 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.357733] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29df35bb-c4c6-4b06-ae31-37c4389704ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.377155] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3904606-8d2c-4bbc-b019-118011e1ccbb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.267652] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "31371445-428d-4236-a833-f07122553cfa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 598.267966] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "31371445-428d-4236-a833-f07122553cfa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.496661] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Successfully created port: a34c33c2-1da0-482b-862b-94404266a360 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.770597] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 599.301822] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.302221] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 599.304423] env[69227]: INFO nova.compute.claims [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.379418] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d5b6fe-61d4-4279-b44d-64d43ea60009 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.392021] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6bd260e-95af-444f-bbfe-8f9b0ce123b9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.425354] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05e71bce-67e4-4bec-bf3d-f34142b37bf6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.434619] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4b89e8-8ed9-4075-b136-959a66461fd2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.450030] env[69227]: DEBUG nova.compute.provider_tree [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.956817] env[69227]: DEBUG nova.scheduler.client.report [None req-7100ec18-6c6f-4439-b519-c01940c842a4 
tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 601.380170] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "bca4ebfd-1581-4873-b992-98a9982a7063" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.380484] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "bca4ebfd-1581-4873-b992-98a9982a7063" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.462748] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.160s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.463977] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 601.771694] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Successfully updated port: a34c33c2-1da0-482b-862b-94404266a360 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 601.883767] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Starting instance... 
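
The inventory payload at the top of this report gives, per resource class, a raw total, a reserved amount, an allocation (over-commit) ratio, and a max_unit cap per single allocation. Usable capacity is roughly (total - reserved) * allocation_ratio, so the 48 host VCPUs with a 4.0 ratio amount to 192 schedulable VCPUs. A minimal sketch of that arithmetic using the values from the log (plain Python, not the Placement service's code):

    # Illustrative only: recompute the usable capacity implied by the
    # inventory payload logged above; not Placement's implementation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 93,    'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Capacity once reserved amounts are excluded and over-commit applied.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # max_unit caps what one instance may request of this resource class.
        print(f"{rc}: capacity={capacity:.0f}, single-allocation cap={inv['max_unit']}")

This prints capacity 192 for VCPU, 196078 for MEMORY_MB and 400 for DISK_GB (with a 93 GB per-instance cap), which is why the 1-vCPU, 192 MB, 1 GB claims below succeed immediately.
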
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 601.973106] env[69227]: DEBUG nova.compute.utils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.973106] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.973106] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 602.030654] env[69227]: DEBUG nova.policy [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8742346517442b7a67bc738499fb988', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9bb92ec8f47e46abac9fae14ebac27cb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 602.277194] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquiring lock "refresh_cache-16959790-5fdc-4304-b889-45bb6b015c3c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.277356] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquired lock "refresh_cache-16959790-5fdc-4304-b889-45bb6b015c3c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.277595] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 602.345388] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Successfully created port: 164b22da-7a9d-4f15-8ede-2b38d1ba6a7c {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.420372] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 
tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.420372] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.421530] env[69227]: INFO nova.compute.claims [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.477496] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 602.565300] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquiring lock "b9eb341d-bf37-4848-90b0-a774eb382f72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.565434] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Lock "b9eb341d-bf37-4848-90b0-a774eb382f72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.881509] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.958972] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Successfully created port: 8f8a4ac8-561f-4b4f-8488-16e5535ae973 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.073234] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Starting instance... 
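
The lockutils entries around each resource-tracker claim follow a fixed pattern: a caller acquires a named lock, the time spent waiting is logged on acquisition, and the hold time is logged on release (for example "waited 0.000s" here and "held 2.160s" earlier). A standard-library stand-in for that pattern, shown only to make the timings readable; this is not oslo.concurrency's implementation:

    import contextlib
    import threading
    import time

    _locks = {}                        # one shared lock object per lock name
    _registry_guard = threading.Lock()

    @contextlib.contextmanager
    def timed_lock(name, owner):
        """Acquire a named lock and log wait/hold times, lockutils-style."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        try:
            yield
        finally:
            held = time.monotonic() - start - waited
            lock.release()
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    # Usage mirroring the instance_claim sections above:
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)               # stand-in for claiming resources
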
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 603.490638] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 603.529875] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.529875] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.529875] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.529875] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.530063] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.530063] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.530063] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.530063] env[69227]: DEBUG nova.virt.hardware [None 
req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.530063] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.530204] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.530204] env[69227]: DEBUG nova.virt.hardware [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.531321] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea94e35-b0c5-40f6-8795-01d3f5498fc8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.537131] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceab99e6-1eb5-4028-beee-e82d09738bfd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.546210] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b658b87f-538f-4ae7-aee6-76fe252bde74 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.565414] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ca00d2-8442-4c33-b23b-20c5db9881a9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.597898] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Updating instance_info_cache with network_info: [{"id": "a34c33c2-1da0-482b-862b-94404266a360", "address": "fa:16:3e:1a:0e:45", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapa34c33c2-1d", "ovs_interfaceid": "a34c33c2-1da0-482b-862b-94404266a360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.599571] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180ede62-635b-4a89-9488-8ff9988c2038 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.613154] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5df66fca-063a-4b9d-8f3c-96e1d89e34ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.633020] env[69227]: DEBUG nova.compute.provider_tree [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.633020] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.112018] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Releasing lock "refresh_cache-16959790-5fdc-4304-b889-45bb6b015c3c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.112018] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Instance network_info: |[{"id": "a34c33c2-1da0-482b-862b-94404266a360", "address": "fa:16:3e:1a:0e:45", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa34c33c2-1d", "ovs_interfaceid": "a34c33c2-1da0-482b-862b-94404266a360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 604.112333] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:0e:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a34c33c2-1da0-482b-862b-94404266a360', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.125101] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 604.126232] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Successfully created port: 44dde563-d819-40e7-bd72-50e5f7d3af3b {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 604.129486] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a022856-11da-4f2b-8730-ae005cb93b64 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.135083] env[69227]: DEBUG nova.scheduler.client.report [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 604.154602] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Created folder: OpenStack in parent group-v4. [ 604.154924] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Creating folder: Project (ca9300e057e345c183dcf36fdc2d752f). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 604.155352] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d023ecb8-4261-4066-bb6b-1561a21b356e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.169728] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Created folder: Project (ca9300e057e345c183dcf36fdc2d752f) in parent group-v694623. [ 604.169863] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Creating folder: Instances. Parent ref: group-v694624. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 604.173289] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0e616bd-fac2-4885-8e4f-0e6e6ecbeefa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.179860] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Created folder: Instances in parent group-v694624. [ 604.180118] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.180376] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 604.180576] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7dafd7e-b334-448c-9b35-8e6c47fdaa70 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.206771] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.206771] env[69227]: value = "task-3474962" [ 604.206771] env[69227]: _type = "Task" [ 604.206771] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.216872] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474962, 'name': CreateVM_Task} progress is 0%. 
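
Before the first VM lands on this vCenter, the driver builds a folder chain for it: OpenStack, then Project (<project id>), then Instances, each created under the previous folder's reference (group-v4 -> group-v694623 -> group-v694624 above). A runnable sketch of that chain walk; `create_folder` is a hypothetical callable standing in for the vCenter CreateFolder call and is assumed to return the existing folder when the name is already present under that parent:

    def ensure_folder_chain(create_folder, root_ref, project_id):
        """Create (or reuse) OpenStack/Project (<id>)/Instances under root_ref.

        `create_folder(parent_ref, name)` is a hypothetical helper assumed to
        return the folder's reference, creating it only if it does not exist.
        """
        parent = root_ref
        for name in ("OpenStack", f"Project ({project_id})", "Instances"):
            parent = create_folder(parent, name)
        return parent                  # folder that will hold the instance's VM

    # In-memory fake so the sketch runs stand-alone:
    refs = {}
    def fake_create_folder(parent, name):
        return refs.setdefault((parent, name), f"group-v{694623 + len(refs)}")

    print(ensure_folder_chain(fake_create_folder, "group-v4",
                              "ca9300e057e345c183dcf36fdc2d752f"))
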
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.237437] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquiring lock "1547effe-8061-4aba-8e1f-302617eee198" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.237718] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Lock "1547effe-8061-4aba-8e1f-302617eee198" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.643790] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.225s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.644500] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 604.648160] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.017s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.655016] env[69227]: INFO nova.compute.claims [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 604.723529] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474962, 'name': CreateVM_Task, 'duration_secs': 0.352642} completed successfully. 
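
Long-running vCenter operations such as CreateVM_Task are driven by submitting the task and then polling it until it reports success or error; above, the task goes from "progress is 0%" to "completed successfully" in about 0.35s. A generic polling loop in the same spirit, where `get_task_info` is a hypothetical accessor standing in for the TaskInfo read back from vCenter (this is not oslo.vmware's wait_for_task):

    import itertools
    import time
    from types import SimpleNamespace

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_ref, interval=0.1, timeout=300.0):
        """Poll a task until it reaches 'success' or 'error', or times out."""
        deadline = time.monotonic() + timeout
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise TaskFailed(str(info.error))
            if time.monotonic() > deadline:
                raise TimeoutError(f"{task_ref} still at {info.progress}%")
            time.sleep(interval)    # the "progress is 0%." lines are these polls

    # Fake task that needs two polls before finishing, to exercise the loop:
    states = itertools.chain(
        [SimpleNamespace(state='running', progress=0, error=None)] * 2,
        [SimpleNamespace(state='success', progress=100, error=None)])
    print(wait_for_task(lambda ref: next(states), "task-3474962").state)
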
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.725427] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 604.731231] env[69227]: DEBUG nova.compute.manager [req-61d09327-eb9c-486e-85f4-04112bdfbbb4 req-1e9552e9-d038-4fd3-80df-d9c42a2c2268 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Received event network-vif-plugged-a34c33c2-1da0-482b-862b-94404266a360 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 604.731442] env[69227]: DEBUG oslo_concurrency.lockutils [req-61d09327-eb9c-486e-85f4-04112bdfbbb4 req-1e9552e9-d038-4fd3-80df-d9c42a2c2268 service nova] Acquiring lock "16959790-5fdc-4304-b889-45bb6b015c3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.731643] env[69227]: DEBUG oslo_concurrency.lockutils [req-61d09327-eb9c-486e-85f4-04112bdfbbb4 req-1e9552e9-d038-4fd3-80df-d9c42a2c2268 service nova] Lock "16959790-5fdc-4304-b889-45bb6b015c3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.731804] env[69227]: DEBUG oslo_concurrency.lockutils [req-61d09327-eb9c-486e-85f4-04112bdfbbb4 req-1e9552e9-d038-4fd3-80df-d9c42a2c2268 service nova] Lock "16959790-5fdc-4304-b889-45bb6b015c3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.731979] env[69227]: DEBUG nova.compute.manager [req-61d09327-eb9c-486e-85f4-04112bdfbbb4 req-1e9552e9-d038-4fd3-80df-d9c42a2c2268 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] No waiting events found dispatching network-vif-plugged-a34c33c2-1da0-482b-862b-94404266a360 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 604.732159] env[69227]: WARNING nova.compute.manager [req-61d09327-eb9c-486e-85f4-04112bdfbbb4 req-1e9552e9-d038-4fd3-80df-d9c42a2c2268 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Received unexpected event network-vif-plugged-a34c33c2-1da0-482b-862b-94404266a360 for instance with vm_state building and task_state spawning. [ 604.743027] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Starting instance... 
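
The network-vif-plugged handling above shows the external-event path: Neutron's notification arrives, the manager looks for a registered waiter for that (instance, event) pair, finds none because the instance is still building, and logs the event as unexpected. The pattern underneath is a small registry of per-instance named events; a standard-library sketch of that idea (not Nova's InstanceEvents class):

    import threading
    from collections import defaultdict

    class InstanceEventWaiters:
        """Map (instance_uuid, event_name) -> threading.Event."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)

        def prepare(self, instance_uuid, event_name):
            """Register a waiter before triggering work that emits the event."""
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def dispatch(self, instance_uuid, event_name):
            """Called by the external-event handler; False if nobody waits."""
            with self._lock:
                ev = self._waiters[instance_uuid].pop(event_name, None)
            if ev is None:
                print(f"Received unexpected event {event_name} for {instance_uuid}")
                return False
            ev.set()
            return True

    waiters = InstanceEventWaiters()
    # Event arrives before any waiter was registered -> "unexpected", as logged.
    waiters.dispatch("16959790-5fdc-4304-b889-45bb6b015c3c",
                     "network-vif-plugged-a34c33c2-1da0-482b-862b-94404266a360")
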
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 604.834925] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "a1095b15-f871-4dd2-9712-330d26ba4143" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.835910] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "a1095b15-f871-4dd2-9712-330d26ba4143" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.892394] env[69227]: DEBUG oslo_vmware.service [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72198860-2a46-4fb0-b1f4-1406eb7f15c2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.903053] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.903053] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.903479] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 604.904389] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-419f75ba-d086-4f15-b305-f803b7adc7ed {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.910526] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Waiting for the task: (returnval){ [ 604.910526] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f2f0dc-3ea7-18cd-9194-3d2f34fec141" [ 604.910526] env[69227]: _type = "Task" [ 604.910526] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.926794] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f2f0dc-3ea7-18cd-9194-3d2f34fec141, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.032970] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "ddea4fd2-96b9-445c-939d-92c247247452" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.034537] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "ddea4fd2-96b9-445c-939d-92c247247452" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.034881] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.035509] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.160414] env[69227]: DEBUG nova.compute.utils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.163882] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 605.163882] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 605.277861] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.322365] env[69227]: DEBUG nova.policy [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17e13cdf466248b8be8676fb46f2cc37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3fada40ad194197be6741a998d2c1de', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 605.339810] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Starting instance... 
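
The policy failure above is the expected outcome for a member/reader token asking for network:attach_external_network, so the port is created without external-network privileges. The real decision is made by oslo.policy against the configured rules; purely as an illustration, a hand-rolled check under the assumption that the rule requires an admin credential:

    def can_attach_external_network(creds):
        # Hypothetical rule: only admin tokens may attach external networks.
        return creds.get('is_admin', False) or 'admin' in creds.get('roles', [])

    creds = {'is_admin': False, 'roles': ['reader', 'member'],
             'project_id': 'f3fada40ad194197be6741a998d2c1de'}
    print(can_attach_external_network(creds))   # False, matching the log above
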
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 605.423056] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.423593] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.423898] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.424135] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.424530] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.425092] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d79bf7b0-e423-4099-933c-0c67397c6e69 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.445679] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.445886] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 605.446724] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7269b6-73e9-4e22-8f9a-c013b05c647e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.454439] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f850c563-91ab-4076-b340-e56ff2f5599c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.462869] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Waiting for the task: (returnval){ [ 605.462869] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52bb01f5-6467-809c-891e-379feebcdd72" [ 605.462869] env[69227]: _type = "Task" [ 605.462869] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.469072] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52bb01f5-6467-809c-891e-379feebcdd72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.538353] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 605.544548] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 605.669792] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 605.841907] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603efbff-c462-42e3-931a-3002da9aba5e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.854465] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a760aa-5f19-4a6a-a0f0-bba834512bf3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.888828] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 605.890404] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94f1e19-4090-455f-942d-e422810d5758 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.897979] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b96dfe-2599-4d0d-9059-ab5ef4050f02 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.917077] env[69227]: DEBUG nova.compute.provider_tree [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.972511] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 605.973018] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Creating directory with path [datastore2] vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.973202] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c56bb9e-633c-4b92-b684-287cd935998f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.993212] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Created directory with path [datastore2] vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.993434] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc 
tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Fetch image to [datastore2] vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 605.993632] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 605.994990] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09690034-9bad-493a-bffc-ffeedc3471bc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.001826] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b707cf1a-caab-43d7-9558-697a1e7630de {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.012983] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82047c5-9631-4452-961b-1f87989682ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.054499] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfa816c-a4ac-40d8-aa0f-01ebad3ec6d0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.061720] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e2ee2b14-2c9b-4151-8b90-903e2e9688fa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.079038] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.087299] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 606.099223] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.177900] env[69227]: DEBUG oslo_vmware.rw_handles [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 606.248218] env[69227]: DEBUG oslo_vmware.rw_handles [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 606.248872] env[69227]: DEBUG oslo_vmware.rw_handles [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 606.420273] env[69227]: DEBUG nova.scheduler.client.report [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 606.680433] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Start spawning the instance on the hypervisor. 
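
The rw_handles entries show the image bytes being streamed straight into the datastore over the ESX host's HTTPS /folder endpoint, with the datastore-relative file path in the URL path and the datacenter and datastore names as query parameters. A small sketch that rebuilds a URL of that shape (illustrative; not oslo.vmware's rw_handles code):

    from urllib.parse import quote, urlencode

    def datastore_upload_url(host, ds_path, dc_path="ha-datacenter",
                             ds_name="datastore2"):
        """Build an ESX /folder URL for writing a file into a datastore.

        `ds_path` is the datastore-relative path, e.g.
        "vmware_temp/<uuid>/<image-id>/tmp-sparse.vmdk".
        """
        query = urlencode({"dcPath": dc_path, "dsName": ds_name})
        return f"https://{host}:443/folder/{quote(ds_path)}?{query}"

    print(datastore_upload_url(
        "esx7c1n2.openstack.eu-de-1.cloud.sap",
        "vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/"
        "78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk"))
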
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 606.719185] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 606.719185] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 606.719185] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.719330] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 606.719330] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.719999] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 606.720752] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 606.723772] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 606.723772] 
env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 606.723772] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 606.723772] env[69227]: DEBUG nova.virt.hardware [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 606.723772] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f980b-cffb-474f-9b94-64d6ed3e31a1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.731047] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Successfully created port: bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 606.736575] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355d7c01-389a-4486-8fd7-bb264d2f5903 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.929718] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.930314] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 606.937055] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.658s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.937055] env[69227]: INFO nova.compute.claims [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.270624] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Successfully updated port: 164b22da-7a9d-4f15-8ede-2b38d1ba6a7c {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 607.392467] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "4005bdf5-3826-4214-9fa6-f794c4f043df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.392693] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.437824] env[69227]: DEBUG nova.compute.utils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 607.439152] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 607.439377] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 607.761515] env[69227]: DEBUG nova.policy [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c1e25a48e1e43e8b8bc9d130db54f2a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5e9336df0cc4902a5f4cfe725c66aac', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 607.901942] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 607.944645] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 608.152911] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c06627c-8458-4f95-85fa-b8a6a3204b82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.161976] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eceae57-983b-42ae-8d0d-03b0f478a8d3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.209181] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c69d22a-3e64-447d-8ee0-43d6b9baa42f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.218911] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ad0b16-3294-4b0c-9e63-134e67cbe4e3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.237578] env[69227]: DEBUG nova.compute.provider_tree [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 608.426167] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.544682] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "02ec5165-3b99-4d81-a7d9-716e63076cb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.545135] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.742143] env[69227]: DEBUG nova.scheduler.client.report [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 608.833185] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "334575bf-5847-41d5-85bd-e72f08a80a59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.833636] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "334575bf-5847-41d5-85bd-e72f08a80a59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.958405] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 609.001431] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 609.001431] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 609.001431] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 609.001573] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 609.001573] env[69227]: DEBUG nova.virt.hardware [None 
req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 609.001573] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 609.001573] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 609.001573] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 609.001715] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 609.001715] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 609.001715] env[69227]: DEBUG nova.virt.hardware [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 609.002495] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9116bdb1-dd8e-4abe-a577-7f835efdd4ca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.014879] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f73107-1e45-42ea-8a6c-e2884c9b601c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.050983] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 609.112036] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Successfully created port: 8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 609.120672] env[69227]: DEBUG nova.compute.manager [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Received event network-changed-a34c33c2-1da0-482b-862b-94404266a360 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 609.120849] env[69227]: DEBUG nova.compute.manager [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Refreshing instance network info cache due to event network-changed-a34c33c2-1da0-482b-862b-94404266a360. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 609.121394] env[69227]: DEBUG oslo_concurrency.lockutils [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] Acquiring lock "refresh_cache-16959790-5fdc-4304-b889-45bb6b015c3c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.121394] env[69227]: DEBUG oslo_concurrency.lockutils [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] Acquired lock "refresh_cache-16959790-5fdc-4304-b889-45bb6b015c3c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.121394] env[69227]: DEBUG nova.network.neutron [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Refreshing network info cache for port a34c33c2-1da0-482b-862b-94404266a360 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 609.250524] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.315s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.251538] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 609.259210] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.367s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.260703] env[69227]: INFO nova.compute.claims [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 609.338843] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Successfully updated port: bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 609.587699] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.769694] env[69227]: DEBUG nova.compute.utils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 609.775361] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 609.775538] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 609.843744] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "refresh_cache-bca4ebfd-1581-4873-b992-98a9982a7063" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.843744] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "refresh_cache-bca4ebfd-1581-4873-b992-98a9982a7063" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.843744] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 610.057106] env[69227]: DEBUG nova.network.neutron [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Updated VIF entry in instance network info cache for port a34c33c2-1da0-482b-862b-94404266a360. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 610.057106] env[69227]: DEBUG nova.network.neutron [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Updating instance_info_cache with network_info: [{"id": "a34c33c2-1da0-482b-862b-94404266a360", "address": "fa:16:3e:1a:0e:45", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.194", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa34c33c2-1d", "ovs_interfaceid": "a34c33c2-1da0-482b-862b-94404266a360", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.059329] env[69227]: DEBUG nova.policy [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '49952e6da7a145648e1c6e751f20e5bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90b998d0cedc45568981778b4988c721', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 610.284652] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 610.401802] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 610.438033] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.444097] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.444353] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 610.444470] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 610.564547] env[69227]: DEBUG oslo_concurrency.lockutils [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] Releasing lock "refresh_cache-16959790-5fdc-4304-b889-45bb6b015c3c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.567866] env[69227]: DEBUG nova.compute.manager [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received event network-vif-plugged-164b22da-7a9d-4f15-8ede-2b38d1ba6a7c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 610.567866] env[69227]: DEBUG oslo_concurrency.lockutils [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] Acquiring lock "31371445-428d-4236-a833-f07122553cfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.567866] env[69227]: DEBUG oslo_concurrency.lockutils [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] Lock "31371445-428d-4236-a833-f07122553cfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.567866] env[69227]: DEBUG oslo_concurrency.lockutils [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] Lock "31371445-428d-4236-a833-f07122553cfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.567866] env[69227]: DEBUG nova.compute.manager [req-9e39ca2f-b52e-4939-b609-706302918ed3 req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] No waiting events found dispatching network-vif-plugged-164b22da-7a9d-4f15-8ede-2b38d1ba6a7c {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 610.568581] env[69227]: WARNING nova.compute.manager [req-9e39ca2f-b52e-4939-b609-706302918ed3 
req-e8513e25-56dc-4568-baa5-747a8b409e19 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received unexpected event network-vif-plugged-164b22da-7a9d-4f15-8ede-2b38d1ba6a7c for instance with vm_state building and task_state spawning. [ 610.568884] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1214d1-f795-4ae4-ab83-dfb405054424 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.580684] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc732a4-e1fb-41e8-bbd8-675bea46c22c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.620131] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a1e623-312b-4f6a-9cb8-b95e42c48748 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.628488] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2420f7b-0d5a-4dbd-80fa-32f54f253436 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.651397] env[69227]: DEBUG nova.compute.provider_tree [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 610.765290] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Updating instance_info_cache with network_info: [{"id": "bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0", "address": "fa:16:3e:d6:81:e2", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb704264-88", "ovs_interfaceid": "bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.825960] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock 
"1724aea2-9fe0-4134-adcc-1a8baf512a80" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.826254] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.849885] env[69227]: DEBUG nova.compute.manager [req-5b9d2dce-5e78-429d-92cc-d3014f894b84 req-223096fd-c464-499a-bb6e-8b112dee7397 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Received event network-vif-plugged-bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 610.850200] env[69227]: DEBUG oslo_concurrency.lockutils [req-5b9d2dce-5e78-429d-92cc-d3014f894b84 req-223096fd-c464-499a-bb6e-8b112dee7397 service nova] Acquiring lock "bca4ebfd-1581-4873-b992-98a9982a7063-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.850340] env[69227]: DEBUG oslo_concurrency.lockutils [req-5b9d2dce-5e78-429d-92cc-d3014f894b84 req-223096fd-c464-499a-bb6e-8b112dee7397 service nova] Lock "bca4ebfd-1581-4873-b992-98a9982a7063-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.850790] env[69227]: DEBUG oslo_concurrency.lockutils [req-5b9d2dce-5e78-429d-92cc-d3014f894b84 req-223096fd-c464-499a-bb6e-8b112dee7397 service nova] Lock "bca4ebfd-1581-4873-b992-98a9982a7063-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.851033] env[69227]: DEBUG nova.compute.manager [req-5b9d2dce-5e78-429d-92cc-d3014f894b84 req-223096fd-c464-499a-bb6e-8b112dee7397 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] No waiting events found dispatching network-vif-plugged-bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 610.851260] env[69227]: WARNING nova.compute.manager [req-5b9d2dce-5e78-429d-92cc-d3014f894b84 req-223096fd-c464-499a-bb6e-8b112dee7397 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Received unexpected event network-vif-plugged-bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 for instance with vm_state building and task_state spawning. [ 610.954832] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 610.954832] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 31371445-428d-4236-a833-f07122553cfa] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 610.954832] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 610.955019] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 610.955081] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 610.955204] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 610.955338] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 610.955580] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.955964] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.957498] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.958335] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.958335] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.958335] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 610.958923] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 610.958923] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.153929] env[69227]: DEBUG nova.scheduler.client.report [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 611.272454] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "refresh_cache-bca4ebfd-1581-4873-b992-98a9982a7063" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.272454] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Instance network_info: |[{"id": "bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0", "address": "fa:16:3e:d6:81:e2", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb704264-88", "ovs_interfaceid": "bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 611.272695] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:81:e2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.283152] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating folder: Project (f3fada40ad194197be6741a998d2c1de). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.283454] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5637b9e8-778e-4a30-801e-1ed309b28dda {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.297566] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 611.303898] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created folder: Project (f3fada40ad194197be6741a998d2c1de) in parent group-v694623. [ 611.303898] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating folder: Instances. Parent ref: group-v694627. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.304377] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1791acad-4a4a-4eda-ba94-1c17bd9404cb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.315587] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created folder: Instances in parent group-v694627. [ 611.315587] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 611.315587] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 611.315817] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc93d320-dc17-4ff3-9b60-29c4810cc9ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.339858] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.339858] env[69227]: value = "task-3474965" [ 611.339858] env[69227]: _type = "Task" [ 611.339858] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.343080] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 611.343080] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 611.343080] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 611.343223] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 611.343223] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 611.343223] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 611.343223] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 611.343223] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 611.343353] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 611.343462] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 611.343628] env[69227]: DEBUG nova.virt.hardware [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 611.344529] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743bd011-8b14-4799-b918-56ebf39dfcd3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.359841] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474965, 'name': CreateVM_Task} progress is 6%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.361991] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16af1cdf-c309-48a1-80a1-7abd61efd2e2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.380152] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Successfully updated port: 8f8a4ac8-561f-4b4f-8488-16e5535ae973 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.445715] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Successfully created port: 99016257-fc9d-4663-b0db-188e2a2b1d63 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.466815] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.661624] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.406s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.662020] env[69227]: DEBUG nova.compute.manager [None 
req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 611.665591] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.586s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.667013] env[69227]: INFO nova.compute.claims [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 611.833679] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Successfully updated port: 8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 611.855541] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474965, 'name': CreateVM_Task, 'duration_secs': 0.351983} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.855812] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 611.856446] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.857056] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.857056] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 611.857187] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2260c17b-ff66-4f96-a570-8711c1a6a5b8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.862351] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce 
tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 611.862351] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52244247-5913-5426-36c2-8067f559ec2f" [ 611.862351] env[69227]: _type = "Task" [ 611.862351] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.877929] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52244247-5913-5426-36c2-8067f559ec2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.173253] env[69227]: DEBUG nova.compute.utils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 612.181388] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 612.181388] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 612.324381] env[69227]: DEBUG nova.policy [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8bc936e55b94c59aa51596e0963dffd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92616e4226c44225a5a509e9e6602cae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 612.343894] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquiring lock "refresh_cache-b9eb341d-bf37-4848-90b0-a774eb382f72" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.343997] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquired lock "refresh_cache-b9eb341d-bf37-4848-90b0-a774eb382f72" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.344166] env[69227]: DEBUG nova.network.neutron [None 
req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 612.354651] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.354651] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.376614] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.376614] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 612.377036] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.532416] env[69227]: DEBUG nova.compute.manager [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received event network-changed-164b22da-7a9d-4f15-8ede-2b38d1ba6a7c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 612.532563] env[69227]: DEBUG nova.compute.manager [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Refreshing instance network info cache due to event network-changed-164b22da-7a9d-4f15-8ede-2b38d1ba6a7c. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 612.532786] env[69227]: DEBUG oslo_concurrency.lockutils [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] Acquiring lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.532957] env[69227]: DEBUG oslo_concurrency.lockutils [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] Acquired lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.533461] env[69227]: DEBUG nova.network.neutron [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Refreshing network info cache for port 164b22da-7a9d-4f15-8ede-2b38d1ba6a7c {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 612.683956] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 612.890404] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Successfully created port: 09a38d74-3ac3-4b2f-8610-febcedd91586 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.971345] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.007849] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671bbeab-f0fb-45cb-8932-c1cc508893e0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.017455] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8b552a-7694-480f-9dbd-8285b547abdd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.058671] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75160a4e-a930-4347-bd9d-2f32428265a4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.068439] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87570ad1-3660-4216-8155-a1d88d41e446 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.087358] env[69227]: DEBUG nova.compute.provider_tree [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 613.220956] env[69227]: DEBUG nova.network.neutron [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.481269] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Successfully updated port: 99016257-fc9d-4663-b0db-188e2a2b1d63 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 613.590441] env[69227]: DEBUG nova.scheduler.client.report [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 613.696490] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 613.723924] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 613.723924] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 613.723924] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 613.724228] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 613.724228] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 613.724228] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 613.724228] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 613.724228] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 613.724551] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 613.724988] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 613.725124] env[69227]: DEBUG nova.virt.hardware [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 613.726434] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0e27d6-a3ca-4d67-abca-34dd4e2563d9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.738523] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821a73e6-4f70-4765-820e-908197baae2f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.825336] env[69227]: DEBUG nova.network.neutron [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.862986] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Updating instance_info_cache with network_info: [{"id": "8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd", "address": "fa:16:3e:8c:81:78", "network": {"id": "e01840b3-8812-45b1-9f83-b977cbba97c3", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1360331924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5e9336df0cc4902a5f4cfe725c66aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d600d7c-3c", "ovs_interfaceid": "8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 613.984276] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquiring lock "refresh_cache-1547effe-8061-4aba-8e1f-302617eee198" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.984540] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquired lock "refresh_cache-1547effe-8061-4aba-8e1f-302617eee198" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.984735] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 614.058208] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Successfully updated port: 44dde563-d819-40e7-bd72-50e5f7d3af3b {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.096312] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.431s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.097254] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 614.100471] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.002s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.101834] env[69227]: INFO nova.compute.claims [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 614.205944] env[69227]: DEBUG nova.compute.manager [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Received event network-changed-bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 614.205999] env[69227]: DEBUG nova.compute.manager [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Refreshing instance network info cache due to event network-changed-bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 614.206229] env[69227]: DEBUG oslo_concurrency.lockutils [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] Acquiring lock "refresh_cache-bca4ebfd-1581-4873-b992-98a9982a7063" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.206388] env[69227]: DEBUG oslo_concurrency.lockutils [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] Acquired lock "refresh_cache-bca4ebfd-1581-4873-b992-98a9982a7063" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.206551] env[69227]: DEBUG nova.network.neutron [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Refreshing network info cache for port bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 614.333552] env[69227]: DEBUG oslo_concurrency.lockutils [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] Releasing lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.333552] env[69227]: DEBUG nova.compute.manager [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received event network-vif-plugged-8f8a4ac8-561f-4b4f-8488-16e5535ae973 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 614.333552] env[69227]: DEBUG oslo_concurrency.lockutils [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] Acquiring lock "31371445-428d-4236-a833-f07122553cfa-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.333552] env[69227]: DEBUG oslo_concurrency.lockutils [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] Lock "31371445-428d-4236-a833-f07122553cfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.333552] env[69227]: DEBUG oslo_concurrency.lockutils [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] Lock "31371445-428d-4236-a833-f07122553cfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 614.333837] env[69227]: DEBUG nova.compute.manager [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] No waiting events found dispatching network-vif-plugged-8f8a4ac8-561f-4b4f-8488-16e5535ae973 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 614.333837] env[69227]: WARNING nova.compute.manager [req-165f4b1c-cd74-4930-821d-efc38a9b7c56 req-e6cb2f6b-183d-4dc9-bb41-7bafc35466bc service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received unexpected event network-vif-plugged-8f8a4ac8-561f-4b4f-8488-16e5535ae973 for instance with vm_state building and task_state spawning. [ 614.368094] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Releasing lock "refresh_cache-b9eb341d-bf37-4848-90b0-a774eb382f72" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.368743] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Instance network_info: |[{"id": "8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd", "address": "fa:16:3e:8c:81:78", "network": {"id": "e01840b3-8812-45b1-9f83-b977cbba97c3", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1360331924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5e9336df0cc4902a5f4cfe725c66aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d600d7c-3c", "ovs_interfaceid": "8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 614.370527] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:81:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '605f83bd-808c-4b54-922e-54b14690987a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 614.384689] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Creating folder: Project (d5e9336df0cc4902a5f4cfe725c66aac). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.384689] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d2ed2aa-e5eb-4d4d-9692-06f7aa739b38 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.396775] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Created folder: Project (d5e9336df0cc4902a5f4cfe725c66aac) in parent group-v694623. [ 614.397772] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Creating folder: Instances. Parent ref: group-v694630. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 614.397772] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8ffba00a-2108-4aa7-a7b6-b581ad25a025 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.406993] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Created folder: Instances in parent group-v694630. [ 614.407660] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.408056] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 614.408126] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-129848a3-f5d5-4e05-a4f3-56b128ed314b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.430535] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 614.430535] env[69227]: value = "task-3474968" [ 614.430535] env[69227]: _type = "Task" [ 614.430535] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.443374] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474968, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.559552] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "11aaee2b-b47e-4078-9674-f46a5f7878ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.559789] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "11aaee2b-b47e-4078-9674-f46a5f7878ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.560242] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.560368] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquired lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.563909] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 614.602203] env[69227]: DEBUG nova.compute.utils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 614.605646] env[69227]: DEBUG nova.compute.manager [None 
req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 614.605817] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 614.619122] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.680261] env[69227]: DEBUG nova.policy [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17e13cdf466248b8be8676fb46f2cc37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3fada40ad194197be6741a998d2c1de', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 614.946243] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474968, 'name': CreateVM_Task} progress is 99%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.107324] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 615.168760] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.256473] env[69227]: DEBUG nova.network.neutron [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Updated VIF entry in instance network info cache for port bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 615.256473] env[69227]: DEBUG nova.network.neutron [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Updating instance_info_cache with network_info: [{"id": "bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0", "address": "fa:16:3e:d6:81:e2", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbb704264-88", "ovs_interfaceid": "bb704264-88ad-4b42-8ea2-7ba6a4e2f2d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.294388] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Successfully updated port: 09a38d74-3ac3-4b2f-8610-febcedd91586 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 615.315372] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Updating instance_info_cache with network_info: [{"id": "99016257-fc9d-4663-b0db-188e2a2b1d63", "address": "fa:16:3e:b4:e4:1e", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99016257-fc", "ovs_interfaceid": "99016257-fc9d-4663-b0db-188e2a2b1d63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} 
[ 615.398380] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Successfully created port: 27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.421090] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a2a850-3135-442f-b7b9-22f51d20dbd6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.433469] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e8d972-e5fd-4894-8cbf-665ed504ce6f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.445774] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474968, 'name': CreateVM_Task} progress is 99%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.484791] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd3c639-4482-4dcb-ab8e-cff5968f8c5b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.489396] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e10cb24-b881-43ad-bc14-50efc73334a0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.503262] env[69227]: DEBUG nova.compute.provider_tree [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 615.757991] env[69227]: DEBUG oslo_concurrency.lockutils [req-bb6d341a-383f-4f10-bcc7-08468a271db6 req-baa0e2d9-cce2-4f14-a598-cddc128457e1 service nova] Releasing lock "refresh_cache-bca4ebfd-1581-4873-b992-98a9982a7063" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.794201] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "refresh_cache-a1095b15-f871-4dd2-9712-330d26ba4143" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.794338] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired lock "refresh_cache-a1095b15-f871-4dd2-9712-330d26ba4143" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.794490] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Building network info cache for instance {{(pid=69227) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 615.825097] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Releasing lock "refresh_cache-1547effe-8061-4aba-8e1f-302617eee198" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.825097] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Instance network_info: |[{"id": "99016257-fc9d-4663-b0db-188e2a2b1d63", "address": "fa:16:3e:b4:e4:1e", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99016257-fc", "ovs_interfaceid": "99016257-fc9d-4663-b0db-188e2a2b1d63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 615.825388] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:e4:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '99016257-fc9d-4663-b0db-188e2a2b1d63', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 615.838685] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Creating folder: Project (90b998d0cedc45568981778b4988c721). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 615.842131] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9604ac20-c163-4f47-8dd4-70e38cbd47e7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.857037] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Created folder: Project (90b998d0cedc45568981778b4988c721) in parent group-v694623. [ 615.857712] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Creating folder: Instances. Parent ref: group-v694633. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 615.858431] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce0b0eb4-178c-4b08-8721-5454f71528f4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.866751] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Created folder: Instances in parent group-v694633. [ 615.866751] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 615.866751] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 615.866961] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de0e081c-b004-4c94-ba03-71454b72aaaf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.892017] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 615.892017] env[69227]: value = "task-3474971" [ 615.892017] env[69227]: _type = "Task" [ 615.892017] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.900109] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474971, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.905132] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received event network-changed-8f8a4ac8-561f-4b4f-8488-16e5535ae973 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 615.905330] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Refreshing instance network info cache due to event network-changed-8f8a4ac8-561f-4b4f-8488-16e5535ae973. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 615.905514] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.951150] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474968, 'name': CreateVM_Task, 'duration_secs': 1.355091} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.951373] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 615.952083] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.952248] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.952647] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 615.952933] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d0f9e2e-f21d-461f-8196-b98e71549e23 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.962444] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Waiting for the task: (returnval){ [ 615.962444] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d92a71-54ca-6ceb-486c-eec631ac38de" [ 615.962444] env[69227]: _type = "Task" [ 615.962444] 
env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.972804] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d92a71-54ca-6ceb-486c-eec631ac38de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.007088] env[69227]: DEBUG nova.scheduler.client.report [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 616.100795] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Updating instance_info_cache with network_info: [{"id": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "address": "fa:16:3e:41:60:82", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap164b22da-7a", "ovs_interfaceid": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "address": "fa:16:3e:2d:a6:ac", "network": {"id": "fd88f9aa-42ee-41a6-b121-c110b969da60", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1390828251", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f8a4ac8-56", "ovs_interfaceid": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "address": "fa:16:3e:03:46:18", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44dde563-d8", "ovs_interfaceid": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.122432] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 616.151569] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 616.152109] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 616.152325] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.152582] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 616.152764] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.152972] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 616.153264] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 616.153487] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 616.153877] 
env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 616.153943] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 616.154171] env[69227]: DEBUG nova.virt.hardware [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.155118] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e22b174-17da-40f1-b702-849fd5d7777d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.168534] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c823103-a7be-4ef9-a369-be0cdbfae791 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.358136] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.367073] env[69227]: DEBUG oslo_concurrency.lockutils [None req-bd292eca-8d23-4d81-8924-ccb62a284456 tempest-ServerExternalEventsTest-1140727613 tempest-ServerExternalEventsTest-1140727613-project-member] Acquiring lock "4053c7e0-9f0d-4acf-90be-0dab69650838" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.367341] env[69227]: DEBUG oslo_concurrency.lockutils [None req-bd292eca-8d23-4d81-8924-ccb62a284456 tempest-ServerExternalEventsTest-1140727613 tempest-ServerExternalEventsTest-1140727613-project-member] Lock "4053c7e0-9f0d-4acf-90be-0dab69650838" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.406382] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474971, 'name': CreateVM_Task, 'duration_secs': 0.31875} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.406567] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 616.407592] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.475953] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.476456] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 616.476708] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.476917] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.477248] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 616.477534] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d0a1da6-acc9-4b51-b5fc-988f567e9338 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.482579] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Waiting for the task: (returnval){ [ 616.482579] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b750da-d887-3ad2-f5a2-09e046aadc56" [ 616.482579] env[69227]: _type = "Task" [ 616.482579] env[69227]: } to 
complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.490826] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b750da-d887-3ad2-f5a2-09e046aadc56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.512392] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.512924] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 616.516406] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.091s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.517814] env[69227]: INFO nova.compute.claims [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.603182] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Releasing lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.603583] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance network_info: |[{"id": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "address": "fa:16:3e:41:60:82", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap164b22da-7a", "ovs_interfaceid": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "address": "fa:16:3e:2d:a6:ac", "network": {"id": "fd88f9aa-42ee-41a6-b121-c110b969da60", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1390828251", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f8a4ac8-56", "ovs_interfaceid": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "address": "fa:16:3e:03:46:18", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44dde563-d8", "ovs_interfaceid": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 616.603878] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquired lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.604136] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Refreshing network info cache for port 8f8a4ac8-561f-4b4f-8488-16e5535ae973 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 616.606584] env[69227]: DEBUG 
nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:60:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '164b22da-7a9d-4f15-8ede-2b38d1ba6a7c', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:a6:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f8a4ac8-561f-4b4f-8488-16e5535ae973', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:46:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e7a44713-0af1-486e-bc0d-00e03a769fa4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '44dde563-d819-40e7-bd72-50e5f7d3af3b', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.621079] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Creating folder: Project (9bb92ec8f47e46abac9fae14ebac27cb). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 616.621998] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1b7dce89-d714-48e0-98bb-ab09a83ac420 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.626936] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Updating instance_info_cache with network_info: [{"id": "09a38d74-3ac3-4b2f-8610-febcedd91586", "address": "fa:16:3e:8d:a1:77", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09a38d74-3a", "ovs_interfaceid": "09a38d74-3ac3-4b2f-8610-febcedd91586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.634912] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 
tempest-ServersTestMultiNic-631920258-project-member] Created folder: Project (9bb92ec8f47e46abac9fae14ebac27cb) in parent group-v694623. [ 616.634912] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Creating folder: Instances. Parent ref: group-v694636. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 616.634912] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4b669cd-7b20-4c52-b4f9-a6ba213c0037 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.648654] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Created folder: Instances in parent group-v694636. [ 616.648903] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.649103] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31371445-428d-4236-a833-f07122553cfa] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 616.649370] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d6c86b7-369f-4bce-8ef9-1b26cd1a5d64 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.675178] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.675178] env[69227]: value = "task-3474974" [ 616.675178] env[69227]: _type = "Task" [ 616.675178] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.683864] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474974, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.908167] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3bc9c384-cad6-4782-947d-19679b9ae4fa tempest-ServerActionsTestJSON-1367138755 tempest-ServerActionsTestJSON-1367138755-project-member] Acquiring lock "1140bb76-ac01-4d31-996b-55e15f547497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.908324] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3bc9c384-cad6-4782-947d-19679b9ae4fa tempest-ServerActionsTestJSON-1367138755 tempest-ServerActionsTestJSON-1367138755-project-member] Lock "1140bb76-ac01-4d31-996b-55e15f547497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.995304] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.995572] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 616.995802] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.025483] env[69227]: DEBUG nova.compute.utils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.027480] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Not allocating networking since 'none' was specified. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 617.129363] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Releasing lock "refresh_cache-a1095b15-f871-4dd2-9712-330d26ba4143" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.129743] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance network_info: |[{"id": "09a38d74-3ac3-4b2f-8610-febcedd91586", "address": "fa:16:3e:8d:a1:77", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09a38d74-3a", "ovs_interfaceid": "09a38d74-3ac3-4b2f-8610-febcedd91586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 617.130980] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:a1:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '09a38d74-3ac3-4b2f-8610-febcedd91586', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.137772] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating folder: Project (92616e4226c44225a5a509e9e6602cae). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.138081] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7e262c9-f6c1-4d7c-a5d9-cbbaac9f932a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.148491] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Created folder: Project (92616e4226c44225a5a509e9e6602cae) in parent group-v694623. [ 617.148491] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating folder: Instances. Parent ref: group-v694639. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.150334] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8154cd89-2214-44f3-9ed4-691a2b11daaf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.159569] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Created folder: Instances in parent group-v694639. [ 617.159803] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 617.159990] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 617.160209] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df534d07-55c6-4955-abd9-4904a67880c0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.180432] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.180432] env[69227]: value = "task-3474977" [ 617.180432] env[69227]: _type = "Task" [ 617.180432] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.188029] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474974, 'name': CreateVM_Task, 'duration_secs': 0.403978} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.188029] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31371445-428d-4236-a833-f07122553cfa] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 617.188029] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.188029] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.188327] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.191040] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d74f9a4-e465-42e1-be21-54ef2575e019 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.192606] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474977, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.199017] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Waiting for the task: (returnval){ [ 617.199017] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]520d06f7-cbb3-30ea-d75c-f9e7ebdf8863" [ 617.199017] env[69227]: _type = "Task" [ 617.199017] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.204934] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]520d06f7-cbb3-30ea-d75c-f9e7ebdf8863, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.533025] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 617.570196] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Updated VIF entry in instance network info cache for port 8f8a4ac8-561f-4b4f-8488-16e5535ae973. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 617.572133] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Updating instance_info_cache with network_info: [{"id": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "address": "fa:16:3e:41:60:82", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap164b22da-7a", "ovs_interfaceid": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "address": "fa:16:3e:2d:a6:ac", "network": {"id": "fd88f9aa-42ee-41a6-b121-c110b969da60", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1390828251", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f8a4ac8-56", "ovs_interfaceid": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "address": "fa:16:3e:03:46:18", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": 
true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44dde563-d8", "ovs_interfaceid": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.653254] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Successfully updated port: 27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 617.691823] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474977, 'name': CreateVM_Task, 'duration_secs': 0.432081} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.692409] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 617.693052] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.711079] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.711837] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.711837] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.712414] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.712739] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.712997] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe203b40-8983-4f13-b7a2-2f8539d61e63 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.718969] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 617.718969] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d08113-eae1-8024-ad7a-4faa97b54637" [ 617.718969] env[69227]: _type = "Task" [ 617.718969] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.728115] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d08113-eae1-8024-ad7a-4faa97b54637, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.828182] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d289a6d2-1f40-4d3d-bd0f-9731000be9f1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.838983] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dacb4db2-ed67-40e6-82af-a65f0e9156ea {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.845840] env[69227]: DEBUG nova.compute.manager [req-2f8afd52-c52a-4a3f-8f45-30e1b7b01ab7 req-f68ab20d-db7b-4c23-9ca2-b7eca94aa513 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Received event network-vif-plugged-27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 617.846147] env[69227]: DEBUG oslo_concurrency.lockutils [req-2f8afd52-c52a-4a3f-8f45-30e1b7b01ab7 req-f68ab20d-db7b-4c23-9ca2-b7eca94aa513 service nova] Acquiring lock "ddea4fd2-96b9-445c-939d-92c247247452-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.846340] env[69227]: DEBUG oslo_concurrency.lockutils [req-2f8afd52-c52a-4a3f-8f45-30e1b7b01ab7 req-f68ab20d-db7b-4c23-9ca2-b7eca94aa513 service nova] Lock "ddea4fd2-96b9-445c-939d-92c247247452-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 617.846473] env[69227]: DEBUG oslo_concurrency.lockutils [req-2f8afd52-c52a-4a3f-8f45-30e1b7b01ab7 req-f68ab20d-db7b-4c23-9ca2-b7eca94aa513 service nova] Lock "ddea4fd2-96b9-445c-939d-92c247247452-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.846641] env[69227]: DEBUG nova.compute.manager [req-2f8afd52-c52a-4a3f-8f45-30e1b7b01ab7 req-f68ab20d-db7b-4c23-9ca2-b7eca94aa513 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] No waiting events found dispatching network-vif-plugged-27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 617.846881] env[69227]: WARNING nova.compute.manager [req-2f8afd52-c52a-4a3f-8f45-30e1b7b01ab7 req-f68ab20d-db7b-4c23-9ca2-b7eca94aa513 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Received unexpected event network-vif-plugged-27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a for instance with vm_state building and task_state spawning. [ 617.883682] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f46d5bd-0b3f-4f50-969f-373904775943 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.892377] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd542f36-de6f-497d-b9d0-cc4ea3ee9913 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.908565] env[69227]: DEBUG nova.compute.provider_tree [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.075807] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Releasing lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.075807] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Received event network-vif-plugged-8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 618.075807] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "b9eb341d-bf37-4848-90b0-a774eb382f72-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.075807] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Lock "b9eb341d-bf37-4848-90b0-a774eb382f72-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.075807] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Lock "b9eb341d-bf37-4848-90b0-a774eb382f72-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.076155] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] No waiting events found dispatching network-vif-plugged-8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 618.076483] env[69227]: WARNING nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Received unexpected event network-vif-plugged-8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd for instance with vm_state building and task_state spawning. [ 618.076837] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Received event network-changed-8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 618.077157] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Refreshing instance network info cache due to event network-changed-8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 618.077779] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "refresh_cache-b9eb341d-bf37-4848-90b0-a774eb382f72" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.077779] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquired lock "refresh_cache-b9eb341d-bf37-4848-90b0-a774eb382f72" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.079193] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Refreshing network info cache for port 8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 618.156695] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "refresh_cache-ddea4fd2-96b9-445c-939d-92c247247452" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.156972] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "refresh_cache-ddea4fd2-96b9-445c-939d-92c247247452" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.157022] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.232856] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.233113] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.233353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.413705] env[69227]: DEBUG 
nova.scheduler.client.report [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 618.544375] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 618.577943] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.578156] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.578360] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.578733] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.578733] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.579026] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 
tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 618.579235] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.579375] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.579961] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.579961] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.579961] env[69227]: DEBUG nova.virt.hardware [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.581093] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bed9dde-c3fd-4c71-b84b-ca719e6cd31c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.591903] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23fac6d-f94c-4f3c-93a5-621b26ab9a0f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.605845] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance VIF info [] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.611619] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Creating folder: Project (3c4c284db18e43248f5d35447921a54f). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 618.611915] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-23cd4901-6439-4e77-8503-6b8ba65bdb38 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.621753] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Created folder: Project (3c4c284db18e43248f5d35447921a54f) in parent group-v694623. [ 618.621947] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Creating folder: Instances. Parent ref: group-v694642. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 618.622203] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37f83fb3-555d-4346-8000-94090c6c4b1c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.630944] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Created folder: Instances in parent group-v694642. [ 618.631078] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 618.631279] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 618.631621] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85e0f7f9-a846-42c8-b218-fc29879a3e19 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.650975] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.650975] env[69227]: value = "task-3474980" [ 618.650975] env[69227]: _type = "Task" [ 618.650975] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.656068] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474980, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.921946] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.922545] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 618.925594] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.338s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.927390] env[69227]: INFO nova.compute.claims [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.933608] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.952527] env[69227]: DEBUG nova.compute.manager [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Received event network-vif-plugged-09a38d74-3ac3-4b2f-8610-febcedd91586 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 618.952962] env[69227]: DEBUG oslo_concurrency.lockutils [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] Acquiring lock "a1095b15-f871-4dd2-9712-330d26ba4143-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.953233] env[69227]: DEBUG oslo_concurrency.lockutils [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] Lock "a1095b15-f871-4dd2-9712-330d26ba4143-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.953471] env[69227]: DEBUG oslo_concurrency.lockutils [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] Lock "a1095b15-f871-4dd2-9712-330d26ba4143-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.953583] env[69227]: DEBUG nova.compute.manager [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] No waiting events found dispatching network-vif-plugged-09a38d74-3ac3-4b2f-8610-febcedd91586 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 618.953749] env[69227]: WARNING nova.compute.manager [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Received unexpected event network-vif-plugged-09a38d74-3ac3-4b2f-8610-febcedd91586 for instance with vm_state building and task_state spawning. [ 618.953925] env[69227]: DEBUG nova.compute.manager [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Received event network-changed-09a38d74-3ac3-4b2f-8610-febcedd91586 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 618.954103] env[69227]: DEBUG nova.compute.manager [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Refreshing instance network info cache due to event network-changed-09a38d74-3ac3-4b2f-8610-febcedd91586. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 618.954316] env[69227]: DEBUG oslo_concurrency.lockutils [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] Acquiring lock "refresh_cache-a1095b15-f871-4dd2-9712-330d26ba4143" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.954491] env[69227]: DEBUG oslo_concurrency.lockutils [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] Acquired lock "refresh_cache-a1095b15-f871-4dd2-9712-330d26ba4143" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.954618] env[69227]: DEBUG nova.network.neutron [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Refreshing network info cache for port 09a38d74-3ac3-4b2f-8610-febcedd91586 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 619.162072] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474980, 'name': CreateVM_Task, 'duration_secs': 0.285223} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.162358] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 619.162711] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.162872] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.163197] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 619.163452] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5319d235-6a12-4e10-90a4-320666fc4f11 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.171675] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for the task: (returnval){ [ 619.171675] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d37d8d-5188-8654-90c4-9d05364b6c0a" [ 619.171675] env[69227]: _type = "Task" [ 619.171675] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.183798] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d37d8d-5188-8654-90c4-9d05364b6c0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.301752] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Updating instance_info_cache with network_info: [{"id": "27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a", "address": "fa:16:3e:69:81:d9", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27e5cef3-c8", "ovs_interfaceid": "27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.395390] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Updated VIF entry in instance network info cache for port 8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 619.395728] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Updating instance_info_cache with network_info: [{"id": "8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd", "address": "fa:16:3e:8c:81:78", "network": {"id": "e01840b3-8812-45b1-9f83-b977cbba97c3", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1360331924-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5e9336df0cc4902a5f4cfe725c66aac", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d600d7c-3c", "ovs_interfaceid": "8d600d7c-3c1b-4af6-bdf1-8e0aa6f8fdfd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.435172] env[69227]: DEBUG nova.compute.utils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.436529] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 619.436700] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 619.611826] env[69227]: DEBUG nova.policy [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17e13cdf466248b8be8676fb46f2cc37', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3fada40ad194197be6741a998d2c1de', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 619.684663] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.684918] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 619.685167] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.807507] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "refresh_cache-ddea4fd2-96b9-445c-939d-92c247247452" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.807896] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Instance network_info: |[{"id": "27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a", "address": "fa:16:3e:69:81:d9", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27e5cef3-c8", "ovs_interfaceid": "27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 619.809213] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:69:81:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.822224] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.822803] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 619.823039] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9d18da2-49ab-4f3f-ae22-82bcb27a2e6c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.849917] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.849917] env[69227]: value = "task-3474981" [ 619.849917] env[69227]: _type = "Task" [ 619.849917] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.858156] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474981, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.898485] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Releasing lock "refresh_cache-b9eb341d-bf37-4848-90b0-a774eb382f72" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.899128] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Received event network-vif-plugged-99016257-fc9d-4663-b0db-188e2a2b1d63 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 619.899128] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "1547effe-8061-4aba-8e1f-302617eee198-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.899224] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Lock "1547effe-8061-4aba-8e1f-302617eee198-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.900829] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Lock "1547effe-8061-4aba-8e1f-302617eee198-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.900829] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] No waiting events found dispatching network-vif-plugged-99016257-fc9d-4663-b0db-188e2a2b1d63 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 619.900829] env[69227]: WARNING nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Received unexpected event network-vif-plugged-99016257-fc9d-4663-b0db-188e2a2b1d63 for instance with vm_state building and task_state spawning. [ 619.900829] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Received event network-changed-99016257-fc9d-4663-b0db-188e2a2b1d63 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 619.900829] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Refreshing instance network info cache due to event network-changed-99016257-fc9d-4663-b0db-188e2a2b1d63. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 619.901035] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "refresh_cache-1547effe-8061-4aba-8e1f-302617eee198" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.901279] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquired lock "refresh_cache-1547effe-8061-4aba-8e1f-302617eee198" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.901356] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Refreshing network info cache for port 99016257-fc9d-4663-b0db-188e2a2b1d63 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 619.943857] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 620.036872] env[69227]: DEBUG nova.network.neutron [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Updated VIF entry in instance network info cache for port 09a38d74-3ac3-4b2f-8610-febcedd91586. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 620.037748] env[69227]: DEBUG nova.network.neutron [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Updating instance_info_cache with network_info: [{"id": "09a38d74-3ac3-4b2f-8610-febcedd91586", "address": "fa:16:3e:8d:a1:77", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.22", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap09a38d74-3a", "ovs_interfaceid": "09a38d74-3ac3-4b2f-8610-febcedd91586", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 620.270904] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa7c337-44cd-48b7-b753-0760009ea02c {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.279637] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ccf07c-ddd8-4cb3-b279-8430dd6e8087 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.319927] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b9509b-2a04-4f44-814c-38044b9c4e2f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.325197] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c0c545-3887-48b3-a474-05419a00df46 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.342762] env[69227]: DEBUG nova.compute.provider_tree [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.367672] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474981, 'name': CreateVM_Task, 'duration_secs': 0.359485} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.367869] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 620.368721] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.368721] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.369032] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 620.369298] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b20edac-e177-40f6-a6a5-8148468dd8c2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.375240] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 620.375240] env[69227]: value = 
"session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f56882-31db-f059-33ea-ecd3dba507d1" [ 620.375240] env[69227]: _type = "Task" [ 620.375240] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.386845] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f56882-31db-f059-33ea-ecd3dba507d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.541438] env[69227]: DEBUG oslo_concurrency.lockutils [req-571328b8-cc39-49ee-8953-26c7fc4c2719 req-fbddacdd-c734-4019-b5d7-03c2cb401ebe service nova] Releasing lock "refresh_cache-a1095b15-f871-4dd2-9712-330d26ba4143" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.709912] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Successfully created port: 493cb5e3-fd33-4e3b-8685-a8cf09dbf639 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.850155] env[69227]: DEBUG nova.scheduler.client.report [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 620.887479] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.887746] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.888129] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.960612] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 
tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 620.998978] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 620.999584] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 620.999584] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.000148] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.000148] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.000374] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.000776] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.001028] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 
tempest-ListServerFiltersTestJSON-809999188-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.002026] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.002026] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.002026] env[69227]: DEBUG nova.virt.hardware [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.002820] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbafba5-146b-4e3d-a54f-c1e6d71f97ea {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.012712] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c0aef8-b7a3-4e9c-9717-21e671f3e646 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.040260] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Updated VIF entry in instance network info cache for port 99016257-fc9d-4663-b0db-188e2a2b1d63. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 621.040260] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Updating instance_info_cache with network_info: [{"id": "99016257-fc9d-4663-b0db-188e2a2b1d63", "address": "fa:16:3e:b4:e4:1e", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.238", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap99016257-fc", "ovs_interfaceid": "99016257-fc9d-4663-b0db-188e2a2b1d63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.249261] env[69227]: DEBUG nova.compute.manager [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Received event network-changed-27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 621.249261] env[69227]: DEBUG nova.compute.manager [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Refreshing instance network info cache due to event network-changed-27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 621.249261] env[69227]: DEBUG oslo_concurrency.lockutils [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] Acquiring lock "refresh_cache-ddea4fd2-96b9-445c-939d-92c247247452" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.249261] env[69227]: DEBUG oslo_concurrency.lockutils [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] Acquired lock "refresh_cache-ddea4fd2-96b9-445c-939d-92c247247452" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.249261] env[69227]: DEBUG nova.network.neutron [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Refreshing network info cache for port 27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 621.358290] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.358885] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 621.361593] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.896s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.361819] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.362986] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 621.362986] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707db4a9-cd95-4158-b2e2-a2eca88d7b40 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.372047] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9af29d62-03bd-47f0-810c-88e2d3d887d8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.388762] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc602a0-d5f0-4540-a221-9da04af4b38b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.395923] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf62f83-af47-49de-84cc-54f5eac82786 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.431554] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180966MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 621.431792] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.432111] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.543031] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Releasing lock "refresh_cache-1547effe-8061-4aba-8e1f-302617eee198" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.543335] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received event network-vif-plugged-44dde563-d819-40e7-bd72-50e5f7d3af3b {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 621.543710] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "31371445-428d-4236-a833-f07122553cfa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.543808] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Lock "31371445-428d-4236-a833-f07122553cfa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.543907] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Lock "31371445-428d-4236-a833-f07122553cfa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.544087] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] No waiting events found dispatching network-vif-plugged-44dde563-d819-40e7-bd72-50e5f7d3af3b {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 621.544289] env[69227]: WARNING nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received unexpected event network-vif-plugged-44dde563-d819-40e7-bd72-50e5f7d3af3b for instance with vm_state building and task_state spawning. [ 621.544470] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Received event network-changed-44dde563-d819-40e7-bd72-50e5f7d3af3b {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 621.544623] env[69227]: DEBUG nova.compute.manager [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Refreshing instance network info cache due to event network-changed-44dde563-d819-40e7-bd72-50e5f7d3af3b. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 621.544800] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquiring lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.544932] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Acquired lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.545093] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Refreshing network info cache for port 44dde563-d819-40e7-bd72-50e5f7d3af3b {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 621.741691] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c181bdac-9eca-4d29-8ade-7d1b1f3f1dfb tempest-VolumesAssistedSnapshotsTest-2024810052 tempest-VolumesAssistedSnapshotsTest-2024810052-project-member] Acquiring lock "7e10b03c-76c8-4ff7-9b66-c578cbe28f2e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.741922] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c181bdac-9eca-4d29-8ade-7d1b1f3f1dfb tempest-VolumesAssistedSnapshotsTest-2024810052 tempest-VolumesAssistedSnapshotsTest-2024810052-project-member] Lock "7e10b03c-76c8-4ff7-9b66-c578cbe28f2e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.870237] env[69227]: DEBUG nova.compute.utils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 621.870237] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 621.870237] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 622.022356] env[69227]: DEBUG nova.policy [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6da7fbd7520f410985f286cc73723095', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0570eff5d5d42b1b041803f2ae43c5e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 622.373677] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 622.464291] env[69227]: DEBUG nova.network.neutron [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Updated VIF entry in instance network info cache for port 27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 622.464459] env[69227]: DEBUG nova.network.neutron [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Updating instance_info_cache with network_info: [{"id": "27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a", "address": "fa:16:3e:69:81:d9", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap27e5cef3-c8", "ovs_interfaceid": "27e5cef3-c85f-4e0e-b6a0-98efdf5e4f3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.471217] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 16959790-5fdc-4304-b889-45bb6b015c3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.471311] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 31371445-428d-4236-a833-f07122553cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.471616] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance bca4ebfd-1581-4873-b992-98a9982a7063 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.471616] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b9eb341d-bf37-4848-90b0-a774eb382f72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.471616] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1547effe-8061-4aba-8e1f-302617eee198 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.471749] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a1095b15-f871-4dd2-9712-330d26ba4143 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.473040] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.473040] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.473040] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.473040] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 622.968184] env[69227]: DEBUG oslo_concurrency.lockutils [req-267f6cef-a87e-4b96-a5e0-eb30069d71cf req-aea39e8f-c71b-4ae6-af0c-72e9ff817932 service nova] Releasing lock "refresh_cache-ddea4fd2-96b9-445c-939d-92c247247452" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.969811] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Updated VIF entry in instance network info cache for port 44dde563-d819-40e7-bd72-50e5f7d3af3b. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 622.970281] env[69227]: DEBUG nova.network.neutron [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] [instance: 31371445-428d-4236-a833-f07122553cfa] Updating instance_info_cache with network_info: [{"id": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "address": "fa:16:3e:41:60:82", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.23", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", "segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap164b22da-7a", "ovs_interfaceid": "164b22da-7a9d-4f15-8ede-2b38d1ba6a7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "address": "fa:16:3e:2d:a6:ac", "network": {"id": "fd88f9aa-42ee-41a6-b121-c110b969da60", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1390828251", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.91", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f8a4ac8-56", "ovs_interfaceid": "8f8a4ac8-561f-4b4f-8488-16e5535ae973", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "address": "fa:16:3e:03:46:18", "network": {"id": "2244d146-888a-43af-b63e-00fd389cdf39", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1853290852", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.187", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9bb92ec8f47e46abac9fae14ebac27cb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e7a44713-0af1-486e-bc0d-00e03a769fa4", "external-id": "nsx-vlan-transportzone-420", 
"segmentation_id": 420, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap44dde563-d8", "ovs_interfaceid": "44dde563-d819-40e7-bd72-50e5f7d3af3b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.979124] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 623.384043] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.419018] 
env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.419018] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.419568] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.419877] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.420183] env[69227]: DEBUG nova.virt.hardware [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.421168] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a33c66ea-1e55-4b76-ada1-1398a933a0e2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.430356] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09e85b28-9fff-4e39-9462-fff931ec62b2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.473429] env[69227]: DEBUG oslo_concurrency.lockutils [req-072dd3a6-800b-4bac-9acf-7ce7a137d204 req-76697d22-8a84-4ce8-bfdd-86e66ae8d250 service nova] Releasing lock "refresh_cache-31371445-428d-4236-a833-f07122553cfa" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 623.487332] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 623.709989] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Successfully created port: 02022d5d-d685-4612-ac4e-703b9a2adaed {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 623.723008] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Successfully updated port: 493cb5e3-fd33-4e3b-8685-a8cf09dbf639 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.989548] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 624.226569] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "refresh_cache-4005bdf5-3826-4214-9fa6-f794c4f043df" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.226569] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "refresh_cache-4005bdf5-3826-4214-9fa6-f794c4f043df" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.226569] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 624.493469] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 11aaee2b-b47e-4078-9674-f46a5f7878ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 624.505866] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ffc9bc4-e90a-4bdb-b734-78e441856755 tempest-ServersWithSpecificFlavorTestJSON-1959141027 tempest-ServersWithSpecificFlavorTestJSON-1959141027-project-member] Acquiring lock "10382ebb-37bc-4d8d-9555-442cb78e0555" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.505866] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ffc9bc4-e90a-4bdb-b734-78e441856755 tempest-ServersWithSpecificFlavorTestJSON-1959141027 tempest-ServersWithSpecificFlavorTestJSON-1959141027-project-member] Lock "10382ebb-37bc-4d8d-9555-442cb78e0555" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.798889] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 625.000686] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4053c7e0-9f0d-4acf-90be-0dab69650838 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 625.010136] env[69227]: DEBUG nova.compute.manager [req-2c0a7742-1e2e-40ec-b09a-2c49aedbd8f4 req-6eef107f-a1b1-444c-b273-eac4bb0ef22d service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Received event network-vif-plugged-493cb5e3-fd33-4e3b-8685-a8cf09dbf639 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 625.010136] env[69227]: DEBUG oslo_concurrency.lockutils [req-2c0a7742-1e2e-40ec-b09a-2c49aedbd8f4 req-6eef107f-a1b1-444c-b273-eac4bb0ef22d service nova] Acquiring lock "4005bdf5-3826-4214-9fa6-f794c4f043df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.010136] env[69227]: DEBUG oslo_concurrency.lockutils [req-2c0a7742-1e2e-40ec-b09a-2c49aedbd8f4 req-6eef107f-a1b1-444c-b273-eac4bb0ef22d service nova] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.010317] env[69227]: DEBUG oslo_concurrency.lockutils [req-2c0a7742-1e2e-40ec-b09a-2c49aedbd8f4 req-6eef107f-a1b1-444c-b273-eac4bb0ef22d service nova] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.010400] env[69227]: DEBUG nova.compute.manager [req-2c0a7742-1e2e-40ec-b09a-2c49aedbd8f4 req-6eef107f-a1b1-444c-b273-eac4bb0ef22d service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] No waiting events found dispatching network-vif-plugged-493cb5e3-fd33-4e3b-8685-a8cf09dbf639 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 625.011295] env[69227]: WARNING nova.compute.manager [req-2c0a7742-1e2e-40ec-b09a-2c49aedbd8f4 req-6eef107f-a1b1-444c-b273-eac4bb0ef22d service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Received unexpected event network-vif-plugged-493cb5e3-fd33-4e3b-8685-a8cf09dbf639 for instance with vm_state building and task_state spawning. 
[ 625.184312] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Updating instance_info_cache with network_info: [{"id": "493cb5e3-fd33-4e3b-8685-a8cf09dbf639", "address": "fa:16:3e:89:35:d2", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap493cb5e3-fd", "ovs_interfaceid": "493cb5e3-fd33-4e3b-8685-a8cf09dbf639", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.505691] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1140bb76-ac01-4d31-996b-55e15f547497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 625.693458] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "refresh_cache-4005bdf5-3826-4214-9fa6-f794c4f043df" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.693458] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance network_info: |[{"id": "493cb5e3-fd33-4e3b-8685-a8cf09dbf639", "address": "fa:16:3e:89:35:d2", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap493cb5e3-fd", "ovs_interfaceid": "493cb5e3-fd33-4e3b-8685-a8cf09dbf639", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 625.693458] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:35:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '52f465cb-7418-4172-bd7d-aec00abeb692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '493cb5e3-fd33-4e3b-8685-a8cf09dbf639', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.702250] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 625.702497] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 625.702723] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8d69319b-5d63-4c37-b9ca-534bf6e3770b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.730220] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.730220] env[69227]: value = "task-3474982" [ 625.730220] env[69227]: _type = "Task" [ 625.730220] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.738894] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474982, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.008536] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7e10b03c-76c8-4ff7-9b66-c578cbe28f2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 626.008536] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 626.008753] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 626.027118] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4189b9c-4c6a-468f-a7ac-8e78849873a5 tempest-ImagesOneServerTestJSON-2014017566 tempest-ImagesOneServerTestJSON-2014017566-project-member] Acquiring lock "017c5882-2f2e-43e0-947a-6996bbdf73d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.027118] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4189b9c-4c6a-468f-a7ac-8e78849873a5 tempest-ImagesOneServerTestJSON-2014017566 tempest-ImagesOneServerTestJSON-2014017566-project-member] Lock "017c5882-2f2e-43e0-947a-6996bbdf73d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.181684] env[69227]: DEBUG oslo_concurrency.lockutils [None req-b6c4d79b-c7ab-49b9-8410-c803255d3715 tempest-ServersV294TestFqdnHostnames-639727780 tempest-ServersV294TestFqdnHostnames-639727780-project-member] Acquiring lock "30ba5d34-7069-4cb5-8292-faa20327a662" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.181923] env[69227]: DEBUG oslo_concurrency.lockutils [None req-b6c4d79b-c7ab-49b9-8410-c803255d3715 tempest-ServersV294TestFqdnHostnames-639727780 tempest-ServersV294TestFqdnHostnames-639727780-project-member] Lock "30ba5d34-7069-4cb5-8292-faa20327a662" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.240474] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474982, 'name': CreateVM_Task, 'duration_secs': 0.342209} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.244435] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 626.244772] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.245048] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.246297] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 626.246297] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d332c6c-13b7-4a66-aa08-b81fce3fca61 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.253525] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 626.253525] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]525971f3-2934-b623-ea4a-7a7f920cc68a" [ 626.253525] env[69227]: _type = "Task" [ 626.253525] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.268071] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.268321] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.268524] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.316411] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Successfully updated port: 02022d5d-d685-4612-ac4e-703b9a2adaed {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 626.429368] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edad6422-6942-4c4e-93d4-a9e652a8399b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.437967] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2286ad31-c84a-4c4b-9245-2002d51a34ce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.480695] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6fa335-bdf3-4612-877a-ff6419b9044b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.488679] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2105cd13-dc7b-491a-aa69-de3c9e3908e9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.504143] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.821530] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "refresh_cache-02ec5165-3b99-4d81-a7d9-716e63076cb0" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.821821] env[69227]: DEBUG oslo_concurrency.lockutils 
[None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "refresh_cache-02ec5165-3b99-4d81-a7d9-716e63076cb0" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.821821] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 627.008987] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 627.391341] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.517436] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 627.517436] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.085s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.709973] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Updating instance_info_cache with network_info: [{"id": "02022d5d-d685-4612-ac4e-703b9a2adaed", "address": "fa:16:3e:e9:8f:5c", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap02022d5d-d6", "ovs_interfaceid": "02022d5d-d685-4612-ac4e-703b9a2adaed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.212986] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "refresh_cache-02ec5165-3b99-4d81-a7d9-716e63076cb0" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.213354] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance network_info: |[{"id": "02022d5d-d685-4612-ac4e-703b9a2adaed", "address": "fa:16:3e:e9:8f:5c", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02022d5d-d6", "ovs_interfaceid": "02022d5d-d685-4612-ac4e-703b9a2adaed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 628.213785] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:8f:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02022d5d-d685-4612-ac4e-703b9a2adaed', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.221583] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating folder: Project (e0570eff5d5d42b1b041803f2ae43c5e). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.221877] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-83073914-da70-4700-b6ce-858075d15e85 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.232517] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Created folder: Project (e0570eff5d5d42b1b041803f2ae43c5e) in parent group-v694623. [ 628.232709] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating folder: Instances. Parent ref: group-v694647. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.233035] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8696640e-5340-4aa7-aec3-b6d1cf9f61d1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.244568] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Created folder: Instances in parent group-v694647. [ 628.244813] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 628.245008] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 628.245794] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b211d2eb-a7d3-4d32-a09a-aa72059b1c51 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.274317] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.274317] env[69227]: value = "task-3474985" [ 628.274317] env[69227]: _type = "Task" [ 628.274317] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.282341] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474985, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.441553] env[69227]: DEBUG nova.compute.manager [req-32447214-b731-43d1-9eac-52529d179782 req-7aa2c11d-e718-4cd5-a676-0826659dd6bf service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Received event network-vif-plugged-02022d5d-d685-4612-ac4e-703b9a2adaed {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 628.441659] env[69227]: DEBUG oslo_concurrency.lockutils [req-32447214-b731-43d1-9eac-52529d179782 req-7aa2c11d-e718-4cd5-a676-0826659dd6bf service nova] Acquiring lock "02ec5165-3b99-4d81-a7d9-716e63076cb0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.442685] env[69227]: DEBUG oslo_concurrency.lockutils [req-32447214-b731-43d1-9eac-52529d179782 req-7aa2c11d-e718-4cd5-a676-0826659dd6bf service nova] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.442685] env[69227]: DEBUG oslo_concurrency.lockutils [req-32447214-b731-43d1-9eac-52529d179782 req-7aa2c11d-e718-4cd5-a676-0826659dd6bf service nova] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.443239] env[69227]: DEBUG nova.compute.manager [req-32447214-b731-43d1-9eac-52529d179782 req-7aa2c11d-e718-4cd5-a676-0826659dd6bf service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] No waiting events found dispatching network-vif-plugged-02022d5d-d685-4612-ac4e-703b9a2adaed {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 628.443441] env[69227]: WARNING nova.compute.manager [req-32447214-b731-43d1-9eac-52529d179782 req-7aa2c11d-e718-4cd5-a676-0826659dd6bf service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Received unexpected event network-vif-plugged-02022d5d-d685-4612-ac4e-703b9a2adaed for instance with vm_state building and task_state spawning. [ 628.560385] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a5d75b32-28b4-499b-8456-15c6d83d7cbb tempest-ServerMetadataTestJSON-2111490668 tempest-ServerMetadataTestJSON-2111490668-project-member] Acquiring lock "8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.560690] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a5d75b32-28b4-499b-8456-15c6d83d7cbb tempest-ServerMetadataTestJSON-2111490668 tempest-ServerMetadataTestJSON-2111490668-project-member] Lock "8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.786215] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474985, 'name': CreateVM_Task, 'duration_secs': 0.305084} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.786494] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 628.787228] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.787500] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.787864] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 628.788206] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38cf2e6f-f048-4c20-8041-9b5df18e36d8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.794474] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 628.794474] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524e0614-b503-c4fd-d330-f64055f55591" [ 628.794474] env[69227]: _type = "Task" [ 628.794474] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.802919] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524e0614-b503-c4fd-d330-f64055f55591, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.130486] env[69227]: DEBUG nova.compute.manager [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Received event network-changed-493cb5e3-fd33-4e3b-8685-a8cf09dbf639 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 629.130684] env[69227]: DEBUG nova.compute.manager [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Refreshing instance network info cache due to event network-changed-493cb5e3-fd33-4e3b-8685-a8cf09dbf639. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 629.130848] env[69227]: DEBUG oslo_concurrency.lockutils [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] Acquiring lock "refresh_cache-4005bdf5-3826-4214-9fa6-f794c4f043df" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.130991] env[69227]: DEBUG oslo_concurrency.lockutils [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] Acquired lock "refresh_cache-4005bdf5-3826-4214-9fa6-f794c4f043df" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.131433] env[69227]: DEBUG nova.network.neutron [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Refreshing network info cache for port 493cb5e3-fd33-4e3b-8685-a8cf09dbf639 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 629.306125] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.306484] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 629.306874] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.859071] env[69227]: DEBUG nova.network.neutron [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Updated VIF entry in instance network info cache for port 493cb5e3-fd33-4e3b-8685-a8cf09dbf639. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 629.859512] env[69227]: DEBUG nova.network.neutron [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Updating instance_info_cache with network_info: [{"id": "493cb5e3-fd33-4e3b-8685-a8cf09dbf639", "address": "fa:16:3e:89:35:d2", "network": {"id": "837c7e24-f338-490b-b079-d3f5c5c24469", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1645086170-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3fada40ad194197be6741a998d2c1de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "52f465cb-7418-4172-bd7d-aec00abeb692", "external-id": "nsx-vlan-transportzone-895", "segmentation_id": 895, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap493cb5e3-fd", "ovs_interfaceid": "493cb5e3-fd33-4e3b-8685-a8cf09dbf639", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.363822] env[69227]: DEBUG oslo_concurrency.lockutils [req-c84bfc63-7910-4d69-8223-b38442caa109 req-862a853d-7eda-4bcf-b322-9e52ba3049f8 service nova] Releasing lock "refresh_cache-4005bdf5-3826-4214-9fa6-f794c4f043df" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.165780] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a696b0a8-4b00-4d1b-8fec-ef5c3373d5d4 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Acquiring lock "81b5e27a-8113-49fc-a845-3160f3bfb030" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.166079] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a696b0a8-4b00-4d1b-8fec-ef5c3373d5d4 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Lock "81b5e27a-8113-49fc-a845-3160f3bfb030" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.318168] env[69227]: DEBUG nova.compute.manager [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Received event network-changed-02022d5d-d685-4612-ac4e-703b9a2adaed {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 631.318168] env[69227]: DEBUG nova.compute.manager [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Refreshing instance network info cache due to event network-changed-02022d5d-d685-4612-ac4e-703b9a2adaed. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 631.318168] env[69227]: DEBUG oslo_concurrency.lockutils [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] Acquiring lock "refresh_cache-02ec5165-3b99-4d81-a7d9-716e63076cb0" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.318168] env[69227]: DEBUG oslo_concurrency.lockutils [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] Acquired lock "refresh_cache-02ec5165-3b99-4d81-a7d9-716e63076cb0" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.318168] env[69227]: DEBUG nova.network.neutron [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Refreshing network info cache for port 02022d5d-d685-4612-ac4e-703b9a2adaed {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 632.178130] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fcc98451-2c17-453c-aaa7-dc512b250cf8 tempest-InstanceActionsNegativeTestJSON-677229811 tempest-InstanceActionsNegativeTestJSON-677229811-project-member] Acquiring lock "3bfa20a9-e1b0-447f-8dcf-abb7bceee157" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.178540] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fcc98451-2c17-453c-aaa7-dc512b250cf8 tempest-InstanceActionsNegativeTestJSON-677229811 tempest-InstanceActionsNegativeTestJSON-677229811-project-member] Lock "3bfa20a9-e1b0-447f-8dcf-abb7bceee157" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.192102] env[69227]: DEBUG nova.network.neutron [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Updated VIF entry in instance network info cache for port 02022d5d-d685-4612-ac4e-703b9a2adaed. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 632.194393] env[69227]: DEBUG nova.network.neutron [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Updating instance_info_cache with network_info: [{"id": "02022d5d-d685-4612-ac4e-703b9a2adaed", "address": "fa:16:3e:e9:8f:5c", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02022d5d-d6", "ovs_interfaceid": "02022d5d-d685-4612-ac4e-703b9a2adaed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.703206] env[69227]: DEBUG oslo_concurrency.lockutils [req-0c2d85ec-a0a1-4a36-81a7-fe6de146b8e6 req-2e77cdb6-e06f-4fa6-94ce-758baf1bca93 service nova] Releasing lock "refresh_cache-02ec5165-3b99-4d81-a7d9-716e63076cb0" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.274630] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9892da88-9748-49e9-89af-c5e7c02eb2a3 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Acquiring lock "9e87431d-abde-4b1e-93a4-71d34e17308f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.275070] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9892da88-9748-49e9-89af-c5e7c02eb2a3 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Lock "9e87431d-abde-4b1e-93a4-71d34e17308f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.602908] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ac0b5017-2849-49f5-82b6-9c90191368d3 tempest-ServersAdmin275Test-2002803998 tempest-ServersAdmin275Test-2002803998-project-member] Acquiring lock "4e6f82f5-9766-46fa-a28a-8f13c5c8fd58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.603213] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ac0b5017-2849-49f5-82b6-9c90191368d3 tempest-ServersAdmin275Test-2002803998 tempest-ServersAdmin275Test-2002803998-project-member] Lock "4e6f82f5-9766-46fa-a28a-8f13c5c8fd58" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 640.047116] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e18e8e61-821e-4faa-86c1-8fe94fbe2f05 tempest-AttachInterfacesUnderV243Test-1792428759 tempest-AttachInterfacesUnderV243Test-1792428759-project-member] Acquiring lock "4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 640.047353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e18e8e61-821e-4faa-86c1-8fe94fbe2f05 tempest-AttachInterfacesUnderV243Test-1792428759 tempest-AttachInterfacesUnderV243Test-1792428759-project-member] Lock "4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.929455] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d213183b-15c8-4a78-b86b-4e5f65ff7c53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "4da35a64-ce89-4534-9af7-8eb8c1ec10ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.929455] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d213183b-15c8-4a78-b86b-4e5f65ff7c53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "4da35a64-ce89-4534-9af7-8eb8c1ec10ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.845915] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "a7fa6db1-6f80-4f30-84b1-6179b0774889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.847170] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.829332] env[69227]: DEBUG oslo_concurrency.lockutils [None req-00d873b0-15de-4ba1-ac33-803e1497fe7e tempest-ServersTestJSON-1429325674 tempest-ServersTestJSON-1429325674-project-member] Acquiring lock "877377d1-2c6c-4e43-b5db-5a4b6ceb99f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.829332] env[69227]: DEBUG oslo_concurrency.lockutils [None req-00d873b0-15de-4ba1-ac33-803e1497fe7e tempest-ServersTestJSON-1429325674 tempest-ServersTestJSON-1429325674-project-member] Lock "877377d1-2c6c-4e43-b5db-5a4b6ceb99f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.658947] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ab8f215-0de5-4b62-abbe-c7ab32e389fb tempest-FloatingIPsAssociationNegativeTestJSON-442599654 tempest-FloatingIPsAssociationNegativeTestJSON-442599654-project-member] Acquiring lock "f0454511-5b02-4c32-b630-09215a79f7a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.659215] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ab8f215-0de5-4b62-abbe-c7ab32e389fb tempest-FloatingIPsAssociationNegativeTestJSON-442599654 tempest-FloatingIPsAssociationNegativeTestJSON-442599654-project-member] Lock "f0454511-5b02-4c32-b630-09215a79f7a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.130453] env[69227]: WARNING oslo_vmware.rw_handles [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 654.130453] env[69227]: ERROR oslo_vmware.rw_handles [ 654.131075] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 654.132229] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 654.132485] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Copying Virtual Disk [datastore2] vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/978e7bd5-d2ad-45c0-8a27-f7f8a702e848/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 654.132764] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b20027f6-e63c-4048-beff-74711cd823aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.141408] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Waiting for the task: (returnval){ [ 654.141408] env[69227]: value = "task-3474986" [ 654.141408] env[69227]: _type = "Task" [ 654.141408] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.149629] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Task: {'id': task-3474986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.651291] env[69227]: DEBUG oslo_vmware.exceptions [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 654.654639] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.654884] env[69227]: ERROR nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 654.654884] env[69227]: Faults: ['InvalidArgument'] [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Traceback (most recent call last): [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] yield resources [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self.driver.spawn(context, instance, image_meta, [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self._fetch_image_if_missing(context, vi) [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] image_cache(vi, tmp_image_ds_loc) [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] vm_util.copy_virtual_disk( [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] session._wait_for_task(vmdk_copy_task) [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] return self.wait_for_task(task_ref) [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] return evt.wait() [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] result = hub.switch() [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] return self.greenlet.switch() [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self.f(*self.args, **self.kw) [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] raise exceptions.translate_fault(task_info.error) [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Faults: ['InvalidArgument'] [ 654.654884] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] [ 654.655827] env[69227]: INFO nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Terminating instance [ 654.656748] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.657200] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.658163] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 
tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 654.658163] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 654.658432] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e061cb61-40d9-40ec-b670-af1e5ccfecf1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.661991] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8a5610-b035-4db1-8239-68015f41bb47 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.669309] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 654.669309] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-220aa11d-146a-4fcf-9327-4d0bf6d170bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.672897] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.672897] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 654.672897] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b78e3a79-60fd-4a35-bee3-8abc6d3d0ec6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.678122] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 654.678122] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]526d60da-f637-5bd5-dee3-35df9f62fed4" [ 654.678122] env[69227]: _type = "Task" [ 654.678122] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.685516] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]526d60da-f637-5bd5-dee3-35df9f62fed4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.739292] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 654.740025] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 654.740025] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Deleting the datastore file [datastore2] 16959790-5fdc-4304-b889-45bb6b015c3c {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.740025] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c9979a3-1ce9-4107-bc1e-74e7d694f176 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.747215] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Waiting for the task: (returnval){ [ 654.747215] env[69227]: value = "task-3474988" [ 654.747215] env[69227]: _type = "Task" [ 654.747215] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.758101] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Task: {'id': task-3474988, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.188187] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 655.188639] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating directory with path [datastore2] vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 655.188639] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc63845f-946b-4dc6-86c6-0ac9f68f8779 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.200122] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created directory with path [datastore2] vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 655.200354] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Fetch image to [datastore2] vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 655.200498] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 655.201268] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-688c579a-9bea-43c1-9a44-38701e69cb08 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.208255] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-208aecae-6ad3-4229-a828-abd708b62e1e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.217095] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53cd275-9c93-4fcb-b038-78f2d9c51081 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.252212] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a0b7cc3f-df2d-4d0a-9dac-0eb95e4b8163 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.260900] env[69227]: DEBUG oslo_vmware.api [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Task: {'id': task-3474988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077482} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.261164] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3a51bb7d-32c2-4fd3-a775-2c9a0438c5e5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.262878] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 655.263085] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 655.263262] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 655.263638] env[69227]: INFO nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Took 0.61 seconds to destroy the instance on the hypervisor. 
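Editor's note: the records above follow oslo.vmware's task-polling pattern: the caller logs "Waiting for the task", _poll_task reports "progress is N%" on each tick, and a terminal state is logged with the measured duration (e.g. CreateVM_Task in 0.305s, DeleteDatastoreFile_Task in 0.077s); an error state raises a translated fault, as seen for CopyVirtualDisk_Task. The sketch below is illustrative only and is not the oslo.vmware implementation; get_task_info is a hypothetical callable standing in for a vCenter task-info query.

# Illustrative sketch only (not oslo.vmware code): a generic poll-until-done
# loop matching the "Waiting for the task ... progress is N% ... completed
# successfully" records above. 'get_task_info' is a hypothetical callable
# returning {'state': 'running'|'success'|'error', 'progress': int, ...}.
import time


class TaskTimeout(Exception):
    """Raised when the task does not reach a terminal state in time."""


def poll_task(get_task_info, interval=0.5, timeout=300.0):
    start = time.monotonic()
    deadline = start + timeout
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            # The log records the elapsed time as 'duration_secs' on success.
            return dict(info, duration_secs=time.monotonic() - start)
        if state == 'error':
            # Mirrors raise exceptions.translate_fault(task_info.error).
            raise RuntimeError(info.get('error', 'task failed'))
        if time.monotonic() > deadline:
            raise TaskTimeout('task did not complete within %.0fs' % timeout)
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(interval)


# Example: a fake task that finishes on the third poll.
if __name__ == '__main__':
    states = iter([{'state': 'running', 'progress': 0},
                   {'state': 'running', 'progress': 50},
                   {'state': 'success', 'progress': 100}])
    print(poll_task(lambda: next(states), interval=0.01))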
[ 655.266649] env[69227]: DEBUG nova.compute.claims [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 655.266819] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.267054] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.281899] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 655.358165] env[69227]: DEBUG oslo_vmware.rw_handles [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 655.420624] env[69227]: DEBUG oslo_vmware.rw_handles [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 655.420848] env[69227]: DEBUG oslo_vmware.rw_handles [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 656.254018] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d90643-d9c6-40ed-8206-a99ec4aa4c82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.261055] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d135eddc-24e5-4235-b066-c3ecfba63fc2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.293225] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8a9b55-2d9b-4ec6-918d-ac5a408be1c9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.301186] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1f39f7-e95f-441e-8da6-9c6d0adc7427 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.315161] env[69227]: DEBUG nova.compute.provider_tree [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.818876] env[69227]: DEBUG nova.scheduler.client.report [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 657.323920] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.057s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.324537] env[69227]: ERROR nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 657.324537] env[69227]: Faults: ['InvalidArgument'] [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Traceback (most recent call last): [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 
657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self.driver.spawn(context, instance, image_meta, [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self._fetch_image_if_missing(context, vi) [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] image_cache(vi, tmp_image_ds_loc) [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] vm_util.copy_virtual_disk( [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] session._wait_for_task(vmdk_copy_task) [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] return self.wait_for_task(task_ref) [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] return evt.wait() [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] result = hub.switch() [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] return self.greenlet.switch() [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] self.f(*self.args, **self.kw) [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] raise exceptions.translate_fault(task_info.error) [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Faults: ['InvalidArgument'] [ 657.324537] env[69227]: ERROR nova.compute.manager [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] [ 657.325596] env[69227]: DEBUG nova.compute.utils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 657.331195] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Build of instance 16959790-5fdc-4304-b889-45bb6b015c3c was re-scheduled: A specified parameter was not correct: fileType [ 657.331195] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 657.331613] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 657.331833] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 657.332011] env[69227]: DEBUG nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 657.332182] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 658.338275] env[69227]: DEBUG nova.network.neutron [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.843622] env[69227]: INFO nova.compute.manager [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] [instance: 16959790-5fdc-4304-b889-45bb6b015c3c] Took 1.51 seconds to deallocate network for instance. [ 659.882428] env[69227]: INFO nova.scheduler.client.report [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Deleted allocations for instance 16959790-5fdc-4304-b889-45bb6b015c3c [ 660.392744] env[69227]: DEBUG oslo_concurrency.lockutils [None req-25a2d0a2-18e0-4877-b2d7-d8f2be448cdc tempest-ServersAdminNegativeTestJSON-623982683 tempest-ServersAdminNegativeTestJSON-623982683-project-member] Lock "16959790-5fdc-4304-b889-45bb6b015c3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.112s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.580163] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e64cbcd7-b590-4874-bd80-0b7a04001fd8 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "bf04f0eb-5e79-4ddf-a654-24091deb7fbb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.580404] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e64cbcd7-b590-4874-bd80-0b7a04001fd8 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "bf04f0eb-5e79-4ddf-a654-24091deb7fbb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.896835] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 661.419988] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.420347] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.421988] env[69227]: INFO nova.compute.claims [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.864961] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5240529-c91e-4cbc-be14-5c342859c64c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.872898] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-803589c3-1a73-4145-8690-b1a91b3610c7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.902539] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d106c04-55ec-4c3c-9e5e-0e45c4239018 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.909522] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0233f6e6-3f83-4db5-bec8-5ca1fa1430bf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.922484] env[69227]: DEBUG nova.compute.provider_tree [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.426160] env[69227]: DEBUG nova.scheduler.client.report [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 663.576934] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-9070ddb7-33cc-419f-b1f5-fb092a6efd08 tempest-TenantUsagesTestJSON-1708237157 tempest-TenantUsagesTestJSON-1708237157-project-member] Acquiring lock "588397c9-19ac-4994-8c16-18c77be79411" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.576934] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9070ddb7-33cc-419f-b1f5-fb092a6efd08 tempest-TenantUsagesTestJSON-1708237157 tempest-TenantUsagesTestJSON-1708237157-project-member] Lock "588397c9-19ac-4994-8c16-18c77be79411" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.930582] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.510s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.931129] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 664.436571] env[69227]: DEBUG nova.compute.utils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.438105] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 664.438105] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 664.494243] env[69227]: DEBUG nova.policy [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01a629f205dd4397a0754da1a2be821d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8aad10774db04ffdb23dbac30be22786', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 664.856184] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Successfully created port: 431f1ec4-0bd4-437c-9bb8-54b893345c03 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.942064] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 665.953029] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 665.985443] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 665.985687] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 665.985843] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 665.986052] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 665.986177] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 665.986360] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 665.986520] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 665.986700] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 665.986868] env[69227]: DEBUG 
nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 665.987046] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 665.987226] env[69227]: DEBUG nova.virt.hardware [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 665.988112] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f19f53-eafd-4bf4-8279-b7fe1dd8bb97 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.996602] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2a94605-9567-49d2-b90c-adb72fa32e2f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.285831] env[69227]: DEBUG nova.compute.manager [req-1d00cb2c-685d-4a07-a83a-9eae9d2f0678 req-21889050-884e-406e-9f18-d0d51092f7d0 service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Received event network-vif-plugged-431f1ec4-0bd4-437c-9bb8-54b893345c03 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 666.286059] env[69227]: DEBUG oslo_concurrency.lockutils [req-1d00cb2c-685d-4a07-a83a-9eae9d2f0678 req-21889050-884e-406e-9f18-d0d51092f7d0 service nova] Acquiring lock "334575bf-5847-41d5-85bd-e72f08a80a59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.286263] env[69227]: DEBUG oslo_concurrency.lockutils [req-1d00cb2c-685d-4a07-a83a-9eae9d2f0678 req-21889050-884e-406e-9f18-d0d51092f7d0 service nova] Lock "334575bf-5847-41d5-85bd-e72f08a80a59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.286426] env[69227]: DEBUG oslo_concurrency.lockutils [req-1d00cb2c-685d-4a07-a83a-9eae9d2f0678 req-21889050-884e-406e-9f18-d0d51092f7d0 service nova] Lock "334575bf-5847-41d5-85bd-e72f08a80a59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.286634] env[69227]: DEBUG nova.compute.manager [req-1d00cb2c-685d-4a07-a83a-9eae9d2f0678 req-21889050-884e-406e-9f18-d0d51092f7d0 service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] No waiting events found dispatching network-vif-plugged-431f1ec4-0bd4-437c-9bb8-54b893345c03 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 666.286812] env[69227]: WARNING nova.compute.manager 
[req-1d00cb2c-685d-4a07-a83a-9eae9d2f0678 req-21889050-884e-406e-9f18-d0d51092f7d0 service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Received unexpected event network-vif-plugged-431f1ec4-0bd4-437c-9bb8-54b893345c03 for instance with vm_state building and task_state spawning. [ 666.381787] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Successfully updated port: 431f1ec4-0bd4-437c-9bb8-54b893345c03 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.879513] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "refresh_cache-334575bf-5847-41d5-85bd-e72f08a80a59" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.879513] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquired lock "refresh_cache-334575bf-5847-41d5-85bd-e72f08a80a59" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.879909] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.408922] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.576324] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Updating instance_info_cache with network_info: [{"id": "431f1ec4-0bd4-437c-9bb8-54b893345c03", "address": "fa:16:3e:da:1f:5a", "network": {"id": "07267d8d-4e94-4931-9d0b-acd60df180ce", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1799916383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8aad10774db04ffdb23dbac30be22786", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431f1ec4-0b", "ovs_interfaceid": "431f1ec4-0bd4-437c-9bb8-54b893345c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.079032] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Releasing lock "refresh_cache-334575bf-5847-41d5-85bd-e72f08a80a59" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.079032] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance network_info: |[{"id": "431f1ec4-0bd4-437c-9bb8-54b893345c03", "address": "fa:16:3e:da:1f:5a", "network": {"id": "07267d8d-4e94-4931-9d0b-acd60df180ce", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1799916383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8aad10774db04ffdb23dbac30be22786", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431f1ec4-0b", "ovs_interfaceid": "431f1ec4-0bd4-437c-9bb8-54b893345c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 668.079032] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:1f:5a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8145bd31-c4a7-4828-8818-d065010c9565', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '431f1ec4-0bd4-437c-9bb8-54b893345c03', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.086350] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Creating folder: Project (8aad10774db04ffdb23dbac30be22786). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 668.086653] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8143aa59-6dca-45d9-89d6-ca7945e4b7b1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.098594] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Created folder: Project (8aad10774db04ffdb23dbac30be22786) in parent group-v694623. [ 668.098594] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Creating folder: Instances. Parent ref: group-v694650. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 668.098795] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51c365ec-e271-47ec-bd72-b70f9ed3dbb2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.108052] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Created folder: Instances in parent group-v694650. [ 668.108288] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.108473] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 668.108663] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a630260c-0982-49ff-b6f6-c04ffc1ed20d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.126405] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.126405] env[69227]: value = "task-3474991" [ 668.126405] env[69227]: _type = "Task" [ 668.126405] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.134135] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474991, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.348392] env[69227]: DEBUG nova.compute.manager [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Received event network-changed-431f1ec4-0bd4-437c-9bb8-54b893345c03 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 668.348569] env[69227]: DEBUG nova.compute.manager [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Refreshing instance network info cache due to event network-changed-431f1ec4-0bd4-437c-9bb8-54b893345c03. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 668.348778] env[69227]: DEBUG oslo_concurrency.lockutils [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] Acquiring lock "refresh_cache-334575bf-5847-41d5-85bd-e72f08a80a59" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.348919] env[69227]: DEBUG oslo_concurrency.lockutils [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] Acquired lock "refresh_cache-334575bf-5847-41d5-85bd-e72f08a80a59" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.349236] env[69227]: DEBUG nova.network.neutron [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Refreshing network info cache for port 431f1ec4-0bd4-437c-9bb8-54b893345c03 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 668.636076] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474991, 'name': CreateVM_Task, 'duration_secs': 0.291643} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.636355] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 668.636953] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.637118] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.637429] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.637720] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a63c17db-5e61-4473-ba8e-6dce71871bf1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.642108] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Waiting for the task: (returnval){ [ 668.642108] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52bc2edd-6d3f-b0bb-dd42-919ce967415b" [ 668.642108] env[69227]: _type = "Task" [ 668.642108] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.649347] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52bc2edd-6d3f-b0bb-dd42-919ce967415b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.061648] env[69227]: DEBUG nova.network.neutron [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Updated VIF entry in instance network info cache for port 431f1ec4-0bd4-437c-9bb8-54b893345c03. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 669.062009] env[69227]: DEBUG nova.network.neutron [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Updating instance_info_cache with network_info: [{"id": "431f1ec4-0bd4-437c-9bb8-54b893345c03", "address": "fa:16:3e:da:1f:5a", "network": {"id": "07267d8d-4e94-4931-9d0b-acd60df180ce", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1799916383-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8aad10774db04ffdb23dbac30be22786", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8145bd31-c4a7-4828-8818-d065010c9565", "external-id": "nsx-vlan-transportzone-760", "segmentation_id": 760, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap431f1ec4-0b", "ovs_interfaceid": "431f1ec4-0bd4-437c-9bb8-54b893345c03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.152649] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.152807] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.153021] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.565241] env[69227]: DEBUG oslo_concurrency.lockutils [req-947cd4f4-ded5-4c9d-9c52-8563a5c5be2c req-0854a73a-3fc5-4525-82ac-66aa34d2de1c service nova] Releasing lock "refresh_cache-334575bf-5847-41d5-85bd-e72f08a80a59" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.496315] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 687.496626] env[69227]: DEBUG oslo_service.periodic_task 
[None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.004180] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.004353] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 688.004479] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 688.511426] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 31371445-428d-4236-a833-f07122553cfa] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.511659] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.511705] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 688.514020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 688.514020] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.514020] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.514020] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.514020] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.514020] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.514592] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 688.514592] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 688.514592] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.021017] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.021017] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.021017] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.021017] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 689.021017] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b858c3-e9e6-4b72-907a-9032d7842617 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.028025] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f86585c-73c3-40e7-b206-a91d082c8b58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.046410] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b54dd13f-d7de-4339-8121-e7e9f9d29aa4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.054020] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29724c9b-d729-407e-b755-9703fa1658f0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.082284] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180976MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 689.082432] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.082645] 
env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 690.119191] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 31371445-428d-4236-a833-f07122553cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.119445] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance bca4ebfd-1581-4873-b992-98a9982a7063 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.119483] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b9eb341d-bf37-4848-90b0-a774eb382f72 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.119600] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1547effe-8061-4aba-8e1f-302617eee198 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.119714] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a1095b15-f871-4dd2-9712-330d26ba4143 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.119826] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.119937] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.120060] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.120173] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.120283] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 690.622941] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.126168] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.629785] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 11aaee2b-b47e-4078-9674-f46a5f7878ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.133029] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4053c7e0-9f0d-4acf-90be-0dab69650838 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.636600] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1140bb76-ac01-4d31-996b-55e15f547497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.140129] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7e10b03c-76c8-4ff7-9b66-c578cbe28f2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 693.642878] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 10382ebb-37bc-4d8d-9555-442cb78e0555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.145768] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 017c5882-2f2e-43e0-947a-6996bbdf73d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 694.648793] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 30ba5d34-7069-4cb5-8292-faa20327a662 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.156545] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 695.660370] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 81b5e27a-8113-49fc-a845-3160f3bfb030 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.163207] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3bfa20a9-e1b0-447f-8dcf-abb7bceee157 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 696.666853] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9e87431d-abde-4b1e-93a4-71d34e17308f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.170746] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4e6f82f5-9766-46fa-a28a-8f13c5c8fd58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 697.674300] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.177747] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4da35a64-ce89-4534-9af7-8eb8c1ec10ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 698.680710] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7fa6db1-6f80-4f30-84b1-6179b0774889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.184165] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 877377d1-2c6c-4e43-b5db-5a4b6ceb99f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 699.686915] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0454511-5b02-4c32-b630-09215a79f7a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.190594] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance bf04f0eb-5e79-4ddf-a654-24091deb7fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.694214] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 588397c9-19ac-4994-8c16-18c77be79411 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 700.694977] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 700.694977] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 700.804493] env[69227]: WARNING oslo_vmware.rw_handles [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 700.804493] env[69227]: ERROR oslo_vmware.rw_handles [ 700.804493] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] 
Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 700.807025] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 700.807025] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Copying Virtual Disk [datastore2] vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/5c617313-8a7e-4893-84cd-e358bf0db8e6/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 700.807192] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-63935278-1754-4861-8960-99d4ddbb9c7b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.815103] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 700.815103] env[69227]: value = "task-3474992" [ 700.815103] env[69227]: _type = "Task" [ 700.815103] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.823493] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3474992, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.057297] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559d3eaf-098f-42c4-b642-eeac319261ce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.064761] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ef37437-ac7a-4d55-9440-fc5c486a86c9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.094638] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fd04be-d6a2-46fe-9c84-cb0a45b0316a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.101974] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3a0945-23fc-4d4d-9b86-388c9ac3102e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.114978] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.324930] env[69227]: DEBUG oslo_vmware.exceptions [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 701.326110] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.326110] env[69227]: ERROR nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 701.326110] env[69227]: Faults: ['InvalidArgument'] [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Traceback (most recent call last): [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] yield resources [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self.driver.spawn(context, instance, image_meta, [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: 
bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self._vmops.spawn(context, instance, image_meta, injected_files, [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self._fetch_image_if_missing(context, vi) [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] image_cache(vi, tmp_image_ds_loc) [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] vm_util.copy_virtual_disk( [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] session._wait_for_task(vmdk_copy_task) [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] return self.wait_for_task(task_ref) [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] return evt.wait() [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] result = hub.switch() [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] return self.greenlet.switch() [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self.f(*self.args, **self.kw) [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] raise 
exceptions.translate_fault(task_info.error) [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Faults: ['InvalidArgument'] [ 701.326110] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] [ 701.326110] env[69227]: INFO nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Terminating instance [ 701.327565] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.327740] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.328340] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 701.328525] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 701.328741] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f697cc42-e548-492e-991f-42e3439852f4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.331140] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487bcfd3-6900-4d32-a06b-47e523aed3bf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.337564] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 701.337765] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebd03028-33cd-48d9-95b3-a0431cee0c2d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.339734] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.339907] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 701.340816] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02d7a764-d318-4248-846c-fc59a4247438 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.346334] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Waiting for the task: (returnval){ [ 701.346334] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5290d051-da72-fe94-f926-c5eb44bf63b4" [ 701.346334] env[69227]: _type = "Task" [ 701.346334] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.353570] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5290d051-da72-fe94-f926-c5eb44bf63b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.419624] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 701.419838] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 701.420014] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleting the datastore file [datastore2] bca4ebfd-1581-4873-b992-98a9982a7063 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 701.420328] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20338a8d-c300-48d6-a83c-ace3241e9f1f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.426433] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 701.426433] env[69227]: value = "task-3474994" [ 701.426433] env[69227]: _type = "Task" [ 701.426433] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.434108] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3474994, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.617929] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 701.856775] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 701.857064] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Creating directory with path [datastore2] vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 701.857301] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ccfe6d55-1fcb-46ba-b98b-af90ca3be3a9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.868273] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Created directory with path [datastore2] vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 701.868442] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Fetch image to [datastore2] vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 701.868616] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 701.869476] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ebf16a-52dc-48d8-90fe-b6fe05b01212 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.876703] env[69227]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159a93c3-80bb-4452-a895-5f83010c5650 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.885562] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf9f16e-179e-4aec-a612-987fbc2cd026 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.916730] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e69165f-2b63-4516-ad5a-e0b940700a37 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.922231] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-198f6aec-3bfc-4ade-81dd-496c7fd983f2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.934133] env[69227]: DEBUG oslo_vmware.api [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3474994, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068212} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.934663] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.934663] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 701.934789] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 701.934876] env[69227]: INFO nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Took 0.61 seconds to destroy the instance on the hypervisor. 
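[annotation] The entries above (CopyVirtualDisk_Task failing with VimFaultException 'InvalidArgument: fileType', then DeleteDatastoreFile_Task during cleanup) all go through the same oslo.vmware pattern: submit a vCenter task with invoke_api() and block on wait_for_task(), which raises VimFaultException on a failed task. The snippet below is a minimal sketch of that pattern only, not Nova's vm_util code; the host, credentials and datastore paths are placeholders, and the sourceDatacenter/destDatacenter references that Nova normally passes are omitted for brevity.

    # Sketch of the invoke_api()/wait_for_task() flow seen in the log.
    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc

    session = api.VMwareAPISession(
        'vc.example.test',            # placeholder vCenter host
        'user', 'secret',             # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)

    # Managed object reference for the VirtualDiskManager, as used by
    # CopyVirtualDisk_Task in the traceback above.
    disk_mgr = session.vim.service_content.virtualDiskManager
    try:
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task', disk_mgr,
            sourceName='[datastore2] vmware_temp/example/tmp-sparse.vmdk',
            destName='[datastore2] vmware_temp/example/example.vmdk')
        # Polls the task until it completes, producing the
        # "Task: {...} progress is N%" debug lines seen above.
        session.wait_for_task(task)
    except vexc.VimFaultException as exc:
        # On a fault such as InvalidArgument/fileType, Nova aborts the claim,
        # destroys the half-built instance and reschedules it.
        print('copy failed: %s faults=%s' % (exc, exc.fault_list))

When the fault fires, the compute manager follows exactly the sequence logged here: terminate the instance, unregister the VM, delete its datastore directory, abort the resource claim, and re-schedule the build.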
[ 701.937021] env[69227]: DEBUG nova.compute.claims [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 701.937879] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.945510] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 702.008515] env[69227]: DEBUG oslo_vmware.rw_handles [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 702.069587] env[69227]: DEBUG oslo_vmware.rw_handles [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 702.069777] env[69227]: DEBUG oslo_vmware.rw_handles [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 702.122828] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 702.123044] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 13.040s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.123320] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.186s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.941957] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-675ad7d4-beb9-4a77-9c3e-e05d2d43b55a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.951126] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d6a8d1-3d06-43d2-b674-2a1207c59479 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.980779] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104d211c-5d6c-4266-bd89-df1dab723747 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.987550] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5ad7d5-72e3-4b6f-b731-51e3690bbec2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.000650] env[69227]: DEBUG nova.compute.provider_tree [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.504169] env[69227]: DEBUG nova.scheduler.client.report [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 704.010035] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce 
tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.886s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.010449] env[69227]: ERROR nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 704.010449] env[69227]: Faults: ['InvalidArgument'] [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Traceback (most recent call last): [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self.driver.spawn(context, instance, image_meta, [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self._fetch_image_if_missing(context, vi) [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] image_cache(vi, tmp_image_ds_loc) [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] vm_util.copy_virtual_disk( [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] session._wait_for_task(vmdk_copy_task) [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] return self.wait_for_task(task_ref) [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] return evt.wait() [ 704.010449] env[69227]: 
ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] result = hub.switch() [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] return self.greenlet.switch() [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] self.f(*self.args, **self.kw) [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] raise exceptions.translate_fault(task_info.error) [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Faults: ['InvalidArgument'] [ 704.010449] env[69227]: ERROR nova.compute.manager [instance: bca4ebfd-1581-4873-b992-98a9982a7063] [ 704.011379] env[69227]: DEBUG nova.compute.utils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.012754] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Build of instance bca4ebfd-1581-4873-b992-98a9982a7063 was re-scheduled: A specified parameter was not correct: fileType [ 704.012754] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 704.013168] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 704.013341] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 704.013512] env[69227]: DEBUG nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 704.013675] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 704.791296] env[69227]: DEBUG nova.network.neutron [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.294429] env[69227]: INFO nova.compute.manager [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: bca4ebfd-1581-4873-b992-98a9982a7063] Took 1.28 seconds to deallocate network for instance. [ 706.328303] env[69227]: INFO nova.scheduler.client.report [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleted allocations for instance bca4ebfd-1581-4873-b992-98a9982a7063 [ 706.837093] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ca43386-0014-455d-9c91-fd36e6041cce tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "bca4ebfd-1581-4873-b992-98a9982a7063" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.456s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.340012] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 707.863615] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.863812] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.865526] env[69227]: INFO nova.compute.claims [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 709.223863] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03de07de-1be4-47cd-bc60-92a2458269ce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.231627] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-828e8919-8f22-4c95-9a81-4762c98eed9e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.261522] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d01ad64a-eb6e-457e-a950-244a54687098 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.268782] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513a5c2f-9885-47c2-984b-267c5f22b7c6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.282628] env[69227]: DEBUG nova.compute.provider_tree [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.785811] env[69227]: DEBUG nova.scheduler.client.report [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 710.290773] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.427s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.291369] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 710.797266] env[69227]: DEBUG nova.compute.utils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.798647] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 710.798829] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.865163] env[69227]: DEBUG nova.policy [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8c7b47534bbf42b4a96d8a959a944b8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0378c4e70f1f4f1d82af401f3913a98e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 711.304740] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 711.452462] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Successfully created port: 1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.328273] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 712.355585] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 712.356187] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 712.356506] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 712.356815] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 712.357116] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 712.357396] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 712.357731] env[69227]: DEBUG nova.virt.hardware [None 
req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 712.358065] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 712.358389] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 712.360191] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 712.360191] env[69227]: DEBUG nova.virt.hardware [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.360191] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03115fdb-8b21-4b8f-9440-bb34fe569408 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.369060] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d58c640-c460-4da0-8ee2-19e8b64876d9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.901127] env[69227]: DEBUG nova.compute.manager [req-dfd510f7-6e57-4e67-9229-a17f5ee5b63d req-403f85c6-2e12-4d7c-851d-7e797bbd62c7 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Received event network-vif-plugged-1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 712.901127] env[69227]: DEBUG oslo_concurrency.lockutils [req-dfd510f7-6e57-4e67-9229-a17f5ee5b63d req-403f85c6-2e12-4d7c-851d-7e797bbd62c7 service nova] Acquiring lock "1724aea2-9fe0-4134-adcc-1a8baf512a80-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 712.901127] env[69227]: DEBUG oslo_concurrency.lockutils [req-dfd510f7-6e57-4e67-9229-a17f5ee5b63d req-403f85c6-2e12-4d7c-851d-7e797bbd62c7 service nova] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 712.901127] env[69227]: DEBUG oslo_concurrency.lockutils [req-dfd510f7-6e57-4e67-9229-a17f5ee5b63d req-403f85c6-2e12-4d7c-851d-7e797bbd62c7 service nova] Lock 
"1724aea2-9fe0-4134-adcc-1a8baf512a80-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.901127] env[69227]: DEBUG nova.compute.manager [req-dfd510f7-6e57-4e67-9229-a17f5ee5b63d req-403f85c6-2e12-4d7c-851d-7e797bbd62c7 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] No waiting events found dispatching network-vif-plugged-1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 712.901127] env[69227]: WARNING nova.compute.manager [req-dfd510f7-6e57-4e67-9229-a17f5ee5b63d req-403f85c6-2e12-4d7c-851d-7e797bbd62c7 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Received unexpected event network-vif-plugged-1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 for instance with vm_state building and task_state spawning. [ 713.009643] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Successfully updated port: 1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 713.513419] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "refresh_cache-1724aea2-9fe0-4134-adcc-1a8baf512a80" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.513659] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquired lock "refresh_cache-1724aea2-9fe0-4134-adcc-1a8baf512a80" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.513853] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.046829] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.221563] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Updating instance_info_cache with network_info: [{"id": "1791e99a-317c-4d0f-8b0d-c9fb642f9ac5", "address": "fa:16:3e:a6:ca:a5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1791e99a-31", "ovs_interfaceid": "1791e99a-317c-4d0f-8b0d-c9fb642f9ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.724736] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Releasing lock "refresh_cache-1724aea2-9fe0-4134-adcc-1a8baf512a80" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.725112] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance network_info: |[{"id": "1791e99a-317c-4d0f-8b0d-c9fb642f9ac5", "address": "fa:16:3e:a6:ca:a5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1791e99a-31", "ovs_interfaceid": "1791e99a-317c-4d0f-8b0d-c9fb642f9ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 714.725525] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None 
req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:ca:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1791e99a-317c-4d0f-8b0d-c9fb642f9ac5', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 714.733150] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Creating folder: Project (0378c4e70f1f4f1d82af401f3913a98e). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.733418] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57b76284-3bed-428b-a021-6cdce2e13e98 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.744641] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Created folder: Project (0378c4e70f1f4f1d82af401f3913a98e) in parent group-v694623. [ 714.744829] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Creating folder: Instances. Parent ref: group-v694653. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 714.745121] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1c45fb3-2c1c-4a1e-b29f-e63a6e29e616 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.753741] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Created folder: Instances in parent group-v694653. [ 714.753966] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.754162] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 714.754352] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2de5a121-c1f5-4599-bbec-96a8f219b709 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.772020] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 714.772020] env[69227]: value = "task-3474997" [ 714.772020] env[69227]: _type = "Task" [ 714.772020] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.779015] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474997, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.943362] env[69227]: DEBUG nova.compute.manager [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Received event network-changed-1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 714.943533] env[69227]: DEBUG nova.compute.manager [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Refreshing instance network info cache due to event network-changed-1791e99a-317c-4d0f-8b0d-c9fb642f9ac5. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 714.943743] env[69227]: DEBUG oslo_concurrency.lockutils [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] Acquiring lock "refresh_cache-1724aea2-9fe0-4134-adcc-1a8baf512a80" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.943886] env[69227]: DEBUG oslo_concurrency.lockutils [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] Acquired lock "refresh_cache-1724aea2-9fe0-4134-adcc-1a8baf512a80" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.944218] env[69227]: DEBUG nova.network.neutron [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Refreshing network info cache for port 1791e99a-317c-4d0f-8b0d-c9fb642f9ac5 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 715.281521] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3474997, 'name': CreateVM_Task, 'duration_secs': 0.281341} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.281699] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 715.282377] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.282545] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.282854] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 715.283114] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79fbd46c-5d47-4293-bd1b-610cb890bb6e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.287499] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Waiting for the task: (returnval){ [ 715.287499] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52638a41-e2c6-21d7-8da5-b355a574daf0" [ 715.287499] env[69227]: _type = "Task" [ 715.287499] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.294646] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52638a41-e2c6-21d7-8da5-b355a574daf0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.643995] env[69227]: DEBUG nova.network.neutron [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Updated VIF entry in instance network info cache for port 1791e99a-317c-4d0f-8b0d-c9fb642f9ac5. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 715.644556] env[69227]: DEBUG nova.network.neutron [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Updating instance_info_cache with network_info: [{"id": "1791e99a-317c-4d0f-8b0d-c9fb642f9ac5", "address": "fa:16:3e:a6:ca:a5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.242", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1791e99a-31", "ovs_interfaceid": "1791e99a-317c-4d0f-8b0d-c9fb642f9ac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.799055] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.799961] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 715.799961] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.147401] env[69227]: DEBUG oslo_concurrency.lockutils [req-1556afa9-3a7a-4d3a-ad1d-543244b16355 req-d4c708a0-ad52-43b0-b9aa-72e5b4e5abf5 service nova] Releasing lock "refresh_cache-1724aea2-9fe0-4134-adcc-1a8baf512a80" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.597284] env[69227]: WARNING oslo_vmware.rw_handles [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call 
last): [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 751.597284] env[69227]: ERROR oslo_vmware.rw_handles [ 751.597837] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 751.599673] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 751.599993] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Copying Virtual Disk [datastore2] vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/08057e54-8b17-4db3-a56c-81bd1476e0b4/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 751.600314] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cbdc0fdc-941b-4410-95ce-decebe1927c9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.608148] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Waiting for the task: (returnval){ [ 751.608148] env[69227]: value = "task-3474998" [ 751.608148] env[69227]: _type = "Task" [ 751.608148] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.615909] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Task: {'id': task-3474998, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.119173] env[69227]: DEBUG oslo_vmware.exceptions [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 752.119448] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.119999] env[69227]: ERROR nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 752.119999] env[69227]: Faults: ['InvalidArgument'] [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Traceback (most recent call last): [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] yield resources [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self.driver.spawn(context, instance, image_meta, [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self._fetch_image_if_missing(context, vi) [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] image_cache(vi, tmp_image_ds_loc) [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: 
b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] vm_util.copy_virtual_disk( [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] session._wait_for_task(vmdk_copy_task) [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] return self.wait_for_task(task_ref) [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] return evt.wait() [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] result = hub.switch() [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] return self.greenlet.switch() [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self.f(*self.args, **self.kw) [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] raise exceptions.translate_fault(task_info.error) [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Faults: ['InvalidArgument'] [ 752.119999] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] [ 752.120846] env[69227]: INFO nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Terminating instance [ 752.121979] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.122206] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.122831] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 752.123041] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 752.123273] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44d1dd67-6672-45de-8403-9cbc12ca1cb3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.127118] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d14155-dba4-41cc-a38f-37fc78a1d495 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.133712] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 752.133922] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-845a0cb9-34cd-45b2-a45f-6facd27ca276 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.136227] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.136402] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 752.137388] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-470b9085-e057-4e0c-b8f2-fb7bc66a06a7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.141920] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Waiting for the task: (returnval){ [ 752.141920] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52622675-b7a7-3f96-ab16-b87eedc64ece" [ 752.141920] env[69227]: _type = "Task" [ 752.141920] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.148778] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52622675-b7a7-3f96-ab16-b87eedc64ece, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.206850] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 752.207107] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 752.207399] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Deleting the datastore file [datastore2] b9eb341d-bf37-4848-90b0-a774eb382f72 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 752.207722] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e462aed-7ed9-4796-9114-c88f118b39a1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.214371] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Waiting for the task: (returnval){ [ 752.214371] env[69227]: value = "task-3475000" [ 752.214371] env[69227]: _type = "Task" [ 752.214371] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.222035] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Task: {'id': task-3475000, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.652097] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 752.652365] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Creating directory with path [datastore2] vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.652598] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9c69617-1281-4264-84e9-10dd1f2c7343 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.663586] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Created directory with path [datastore2] vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.663778] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Fetch image to [datastore2] vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 752.663942] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 752.664662] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020587a1-0898-4d83-a255-c23099e80fe1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.671170] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c583c9-8396-421f-93a8-71363ad81920 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.680087] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cfe3992-1632-4503-b9ed-6967689f34f9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.709975] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54cf278-37ab-4ca1-a13e-d58d1e281178 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.717864] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e93c4cfb-be82-4a82-9f2c-d816f0ee9abc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.723918] env[69227]: DEBUG oslo_vmware.api [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Task: {'id': task-3475000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.068534} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.724119] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.724288] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 752.724457] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 752.724624] env[69227]: INFO nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Took 0.60 seconds to destroy the instance on the hypervisor. 
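The CreateVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same wait_for_task/_poll_task pattern: the VMware API call returns a task reference, and the session polls its progress ("progress is 0%.") until the task reports success or its fault is translated and raised, as happened with the InvalidArgument fault on the disk copy. The sketch below is only an illustration of that polling loop under stated assumptions: TaskInfo and get_task_info are hypothetical stand-ins, and this is not the oslo.vmware implementation.

    # Illustrative sketch of the task-polling pattern seen in the log above.
    # TaskInfo and get_task_info() are hypothetical stand-ins, not oslo.vmware APIs.
    import time
    from dataclasses import dataclass
    from typing import Callable, Optional


    @dataclass
    class TaskInfo:
        """Hypothetical snapshot of a vCenter task's state."""
        state: str                 # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0          # percent complete, as logged ("progress is 0%.")
        error: Optional[str] = None


    def wait_for_task(get_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it finishes, raising if it ends in error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # The real session translates the fault (e.g. InvalidArgument)
                # into a VimFaultException; this sketch just raises RuntimeError.
                raise RuntimeError(info.error or 'task failed')
            print(f'progress is {info.progress}%')
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete in time')

In the driver, the error branch is what surfaces as the VimFaultException recorded at [ 752.119999], which in turn triggers the instance tear-down above and the re-schedule seen later in the log.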
[ 752.726628] env[69227]: DEBUG nova.compute.claims [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 752.726792] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.727024] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.738413] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 752.793143] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 752.854669] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 752.854669] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 753.568930] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705380c8-22ac-407c-9966-24e07f267074 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.576492] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58639a6-3e48-47f6-9a68-041cb6265d08 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.605518] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6132d2a0-5c7a-438b-b6b0-2f0b666e5644 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.611938] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af8f9be-3c01-46cb-87b8-13741d1e0d4e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.624403] env[69227]: DEBUG nova.compute.provider_tree [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.127760] env[69227]: DEBUG nova.scheduler.client.report [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 754.633488] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.906s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.634071] env[69227]: ERROR nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 754.634071] env[69227]: Faults: ['InvalidArgument'] [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Traceback (most recent call last): [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 754.634071] env[69227]: ERROR 
nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self.driver.spawn(context, instance, image_meta, [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self._fetch_image_if_missing(context, vi) [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] image_cache(vi, tmp_image_ds_loc) [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] vm_util.copy_virtual_disk( [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] session._wait_for_task(vmdk_copy_task) [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] return self.wait_for_task(task_ref) [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] return evt.wait() [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] result = hub.switch() [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] return self.greenlet.switch() [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] self.f(*self.args, **self.kw) [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] raise exceptions.translate_fault(task_info.error) [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Faults: ['InvalidArgument'] [ 754.634071] env[69227]: ERROR nova.compute.manager [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] [ 754.634787] env[69227]: DEBUG nova.compute.utils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 754.636463] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Build of instance b9eb341d-bf37-4848-90b0-a774eb382f72 was re-scheduled: A specified parameter was not correct: fileType [ 754.636463] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 754.636843] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 754.637058] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 754.637197] env[69227]: DEBUG nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.637382] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 755.365330] env[69227]: DEBUG nova.network.neutron [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.868082] env[69227]: INFO nova.compute.manager [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] [instance: b9eb341d-bf37-4848-90b0-a774eb382f72] Took 1.23 seconds to deallocate network for instance. [ 756.905670] env[69227]: INFO nova.scheduler.client.report [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Deleted allocations for instance b9eb341d-bf37-4848-90b0-a774eb382f72 [ 757.414203] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3af633bd-5c5b-4f86-8004-d21e0309bd6a tempest-AttachInterfacesV270Test-819440827 tempest-AttachInterfacesV270Test-819440827-project-member] Lock "b9eb341d-bf37-4848-90b0-a774eb382f72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 154.849s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.917684] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 758.455134] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.455495] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.457020] env[69227]: INFO nova.compute.claims [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.843613] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d9fb53-8c48-4db5-8581-7930b3a24bd3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.851723] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171868ce-5034-412f-9aad-6ae4867a7aab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.884504] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509a7f4c-875d-404a-9192-a6d5e3f129c5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.892070] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc07bb33-a9b7-42b9-ac31-0c5226402c46 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.905734] env[69227]: DEBUG nova.compute.provider_tree [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.409065] env[69227]: DEBUG nova.scheduler.client.report [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 760.916581] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.461s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.917153] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 761.423440] env[69227]: DEBUG nova.compute.utils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 761.424790] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 761.424968] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 761.476629] env[69227]: DEBUG nova.policy [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e34ba3ddda941df98e49f0fa209ad2e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6db23ffa3ffb41438fe2c702f6692ead', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 761.823761] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Successfully created port: af258a27-658a-4aff-bbe7-ff8245a4effb {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.931969] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 762.128855] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.128855] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.128855] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 762.128855] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 31371445-428d-4236-a833-f07122553cfa] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.634039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.635105] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.635448] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 762.635715] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 762.636044] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.636492] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.638399] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.638399] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.638399] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.638399] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.638399] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 762.638399] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 762.945555] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 762.976053] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 762.976261] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 762.976429] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 762.976622] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 762.976822] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 762.976927] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 762.977290] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d 
tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 762.979733] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 762.979733] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 762.979733] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 762.979733] env[69227]: DEBUG nova.virt.hardware [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 762.979733] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8242ad-4343-4350-8be9-3c7dd7163e45 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.988844] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63334e48-1592-432e-a33c-c34d53f4ecfd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.141155] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.141522] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.142181] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.142181] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 763.142892] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb22268-a393-466e-8017-0cd0a53e6fd9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.152632] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb45bd9-b077-4fee-9de4-f03f08cff5b8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.169437] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f8f4fb-7c73-4cdd-aa3e-9d9c939ed45a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.178825] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cded079f-6650-4848-a1fb-7c5e418ef674 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.212743] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180926MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 763.212909] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.213155] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.374723] env[69227]: DEBUG nova.compute.manager [req-160efc00-d07f-4876-8402-5f8b7e68c278 req-044ad9dd-c795-43dd-9401-32297ba7b63d service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Received event network-vif-plugged-af258a27-658a-4aff-bbe7-ff8245a4effb {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 763.374927] env[69227]: DEBUG oslo_concurrency.lockutils [req-160efc00-d07f-4876-8402-5f8b7e68c278 req-044ad9dd-c795-43dd-9401-32297ba7b63d service nova] Acquiring lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.375084] env[69227]: DEBUG oslo_concurrency.lockutils [req-160efc00-d07f-4876-8402-5f8b7e68c278 req-044ad9dd-c795-43dd-9401-32297ba7b63d service nova] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.375258] env[69227]: DEBUG oslo_concurrency.lockutils [req-160efc00-d07f-4876-8402-5f8b7e68c278 req-044ad9dd-c795-43dd-9401-32297ba7b63d service nova] Lock 
"f77adbc9-4a34-438e-8e0c-ddab0d1f4603-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.375445] env[69227]: DEBUG nova.compute.manager [req-160efc00-d07f-4876-8402-5f8b7e68c278 req-044ad9dd-c795-43dd-9401-32297ba7b63d service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] No waiting events found dispatching network-vif-plugged-af258a27-658a-4aff-bbe7-ff8245a4effb {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 763.375618] env[69227]: WARNING nova.compute.manager [req-160efc00-d07f-4876-8402-5f8b7e68c278 req-044ad9dd-c795-43dd-9401-32297ba7b63d service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Received unexpected event network-vif-plugged-af258a27-658a-4aff-bbe7-ff8245a4effb for instance with vm_state building and task_state spawning. [ 763.487030] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Successfully updated port: af258a27-658a-4aff-bbe7-ff8245a4effb {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 763.988720] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "refresh_cache-f77adbc9-4a34-438e-8e0c-ddab0d1f4603" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.989024] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquired lock "refresh_cache-f77adbc9-4a34-438e-8e0c-ddab0d1f4603" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.989024] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 764.246261] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 31371445-428d-4236-a833-f07122553cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246261] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1547effe-8061-4aba-8e1f-302617eee198 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246261] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a1095b15-f871-4dd2-9712-330d26ba4143 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246261] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246492] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246492] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246560] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246669] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246773] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.246877] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 764.543906] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 764.749458] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 11aaee2b-b47e-4078-9674-f46a5f7878ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 764.822511] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Updating instance_info_cache with network_info: [{"id": "af258a27-658a-4aff-bbe7-ff8245a4effb", "address": "fa:16:3e:aa:86:fc", "network": {"id": "574dad82-6114-4615-9872-187a914786c6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-102756313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6db23ffa3ffb41438fe2c702f6692ead", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf258a27-65", "ovs_interfaceid": "af258a27-658a-4aff-bbe7-ff8245a4effb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.254314] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4053c7e0-9f0d-4acf-90be-0dab69650838 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.325193] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Releasing lock "refresh_cache-f77adbc9-4a34-438e-8e0c-ddab0d1f4603" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.325555] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance network_info: |[{"id": "af258a27-658a-4aff-bbe7-ff8245a4effb", "address": "fa:16:3e:aa:86:fc", "network": {"id": "574dad82-6114-4615-9872-187a914786c6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-102756313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6db23ffa3ffb41438fe2c702f6692ead", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf258a27-65", "ovs_interfaceid": "af258a27-658a-4aff-bbe7-ff8245a4effb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 765.325965] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:86:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0f096917-a0cf-4add-a9d2-23ca1c723b3b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'af258a27-658a-4aff-bbe7-ff8245a4effb', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 765.333948] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Creating folder: Project (6db23ffa3ffb41438fe2c702f6692ead). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 765.334628] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b2816856-071b-467f-89ef-688ada34c561 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.347199] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Created folder: Project (6db23ffa3ffb41438fe2c702f6692ead) in parent group-v694623. [ 765.347469] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Creating folder: Instances. Parent ref: group-v694656. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 765.351017] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac1bf69b-52cd-408c-b517-e62656a48b5f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.357069] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Created folder: Instances in parent group-v694656. [ 765.357897] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 765.358145] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 765.358355] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b963f38-d445-4b44-84a9-242594898566 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.379505] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 765.379505] env[69227]: value = "task-3475003" [ 765.379505] env[69227]: _type = "Task" [ 765.379505] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.388230] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475003, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.425066] env[69227]: DEBUG nova.compute.manager [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Received event network-changed-af258a27-658a-4aff-bbe7-ff8245a4effb {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 765.425384] env[69227]: DEBUG nova.compute.manager [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Refreshing instance network info cache due to event network-changed-af258a27-658a-4aff-bbe7-ff8245a4effb. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 765.425617] env[69227]: DEBUG oslo_concurrency.lockutils [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] Acquiring lock "refresh_cache-f77adbc9-4a34-438e-8e0c-ddab0d1f4603" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.425761] env[69227]: DEBUG oslo_concurrency.lockutils [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] Acquired lock "refresh_cache-f77adbc9-4a34-438e-8e0c-ddab0d1f4603" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.425920] env[69227]: DEBUG nova.network.neutron [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Refreshing network info cache for port af258a27-658a-4aff-bbe7-ff8245a4effb {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 765.758186] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1140bb76-ac01-4d31-996b-55e15f547497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 765.889433] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475003, 'name': CreateVM_Task, 'duration_secs': 0.340591} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.890374] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 765.891047] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.891047] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.891047] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 765.891433] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bdd6484b-3eab-4641-ae55-af6e10896f83 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.896073] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Waiting for the task: (returnval){ [ 765.896073] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b8e5cc-53aa-5083-29e7-5296f4a2ff8f" [ 765.896073] env[69227]: _type = "Task" [ 765.896073] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.904511] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b8e5cc-53aa-5083-29e7-5296f4a2ff8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.262542] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7e10b03c-76c8-4ff7-9b66-c578cbe28f2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.302762] env[69227]: DEBUG nova.network.neutron [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Updated VIF entry in instance network info cache for port af258a27-658a-4aff-bbe7-ff8245a4effb. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 766.303167] env[69227]: DEBUG nova.network.neutron [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Updating instance_info_cache with network_info: [{"id": "af258a27-658a-4aff-bbe7-ff8245a4effb", "address": "fa:16:3e:aa:86:fc", "network": {"id": "574dad82-6114-4615-9872-187a914786c6", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-102756313-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6db23ffa3ffb41438fe2c702f6692ead", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0f096917-a0cf-4add-a9d2-23ca1c723b3b", "external-id": "nsx-vlan-transportzone-894", "segmentation_id": 894, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaf258a27-65", "ovs_interfaceid": "af258a27-658a-4aff-bbe7-ff8245a4effb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.406432] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.406685] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.407292] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.765527] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 10382ebb-37bc-4d8d-9555-442cb78e0555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has 
yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 766.805610] env[69227]: DEBUG oslo_concurrency.lockutils [req-1e7731be-b76d-4df0-9337-8623bc02884a req-851d9603-3253-4c55-9aca-9f6420d44b80 service nova] Releasing lock "refresh_cache-f77adbc9-4a34-438e-8e0c-ddab0d1f4603" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 767.269975] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 017c5882-2f2e-43e0-947a-6996bbdf73d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 767.773941] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 30ba5d34-7069-4cb5-8292-faa20327a662 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.276652] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 768.779575] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 81b5e27a-8113-49fc-a845-3160f3bfb030 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.283424] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3bfa20a9-e1b0-447f-8dcf-abb7bceee157 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.793979] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9e87431d-abde-4b1e-93a4-71d34e17308f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 769.937505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "be8dae7e-b829-455a-b8d3-73fb04c40128" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.937826] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.295247] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4e6f82f5-9766-46fa-a28a-8f13c5c8fd58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 770.798899] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.306219] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4da35a64-ce89-4534-9af7-8eb8c1ec10ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 771.809831] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7fa6db1-6f80-4f30-84b1-6179b0774889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.313599] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 877377d1-2c6c-4e43-b5db-5a4b6ceb99f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 772.817280] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0454511-5b02-4c32-b630-09215a79f7a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.320797] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance bf04f0eb-5e79-4ddf-a654-24091deb7fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.823852] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 588397c9-19ac-4994-8c16-18c77be79411 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 773.824245] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 773.824448] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 774.170041] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3251df76-ba62-43ce-87d0-9a10cf297037 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.177923] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d6a535c-93d1-4a74-94eb-05cb806c1b92 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.207690] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c62360-233b-4f3e-84e3-ffa18ab612b5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.214942] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd219ff-174e-44a1-88e9-6105ce0deeeb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.227988] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 
30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 774.731332] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 775.236865] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 775.237099] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.024s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.644567] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "31371445-428d-4236-a833-f07122553cfa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.188912] env[69227]: WARNING oslo_vmware.rw_handles [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 799.188912] env[69227]: ERROR oslo_vmware.rw_handles [ 799.189515] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-c19d7464-9765-4215-841b-b906bd67f9a0 
tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 799.191113] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 799.191359] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Copying Virtual Disk [datastore2] vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/136a7837-972a-4108-8f3b-458f5b82f2bc/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 799.191645] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b97060c3-6798-4a36-bedf-5397c1d429f8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.199474] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Waiting for the task: (returnval){ [ 799.199474] env[69227]: value = "task-3475004" [ 799.199474] env[69227]: _type = "Task" [ 799.199474] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.206953] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Task: {'id': task-3475004, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.709252] env[69227]: DEBUG oslo_vmware.exceptions [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 799.709493] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.710111] env[69227]: ERROR nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 799.710111] env[69227]: Faults: ['InvalidArgument'] [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] Traceback (most recent call last): [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] yield resources [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self.driver.spawn(context, instance, image_meta, [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self._vmops.spawn(context, instance, image_meta, injected_files, [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self._fetch_image_if_missing(context, vi) [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] image_cache(vi, tmp_image_ds_loc) [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] vm_util.copy_virtual_disk( [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] session._wait_for_task(vmdk_copy_task) [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] return self.wait_for_task(task_ref) [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] return evt.wait() [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] result = hub.switch() [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] return self.greenlet.switch() [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self.f(*self.args, **self.kw) [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] raise exceptions.translate_fault(task_info.error) [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] Faults: ['InvalidArgument'] [ 799.710111] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] [ 799.710880] env[69227]: INFO nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Terminating instance [ 799.712028] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.712145] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 799.712376] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-322ae43a-a624-452e-86a4-3bb62907fd19 {{(pid=69227) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.714531] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 799.714739] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 799.715472] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab78b86-57fe-444a-bea4-a812c6899c8a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.722540] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 799.722794] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-824c7054-75e4-49a1-966c-3d3bd0a40cf1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.724972] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 799.725112] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 799.725987] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771bde51-2d1c-43be-9587-92e55a6caf95 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.730677] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Waiting for the task: (returnval){ [ 799.730677] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52310a58-3c8a-356a-adc5-c1a0f5bb866d" [ 799.730677] env[69227]: _type = "Task" [ 799.730677] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.741016] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52310a58-3c8a-356a-adc5-c1a0f5bb866d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.795499] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 799.795747] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 799.795881] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Deleting the datastore file [datastore2] 1547effe-8061-4aba-8e1f-302617eee198 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.796175] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccb4adf5-40f6-46a1-87a7-9c57c43046e8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.802632] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Waiting for the task: (returnval){ [ 799.802632] env[69227]: value = "task-3475006" [ 799.802632] env[69227]: _type = "Task" [ 799.802632] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.811189] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Task: {'id': task-3475006, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.241292] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 800.241613] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Creating directory with path [datastore2] vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.241743] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d629a6e-83bd-4f7c-895e-37f8e4563372 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.292189] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Created directory with path [datastore2] vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.292396] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Fetch image to [datastore2] vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 800.292567] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 800.293339] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e6c637-fd3d-41ea-8bda-afce04ecb679 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.300291] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd61a446-b559-4b49-8113-1d4a146b31ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.311800] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ca513b-0eb6-4164-b1e2-a90edb42eeb3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.317959] env[69227]: DEBUG oslo_vmware.api [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] 
Task: {'id': task-3475006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067062} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.318595] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 800.318654] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 800.318851] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 800.319015] env[69227]: INFO nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Took 0.60 seconds to destroy the instance on the hypervisor. [ 800.345327] env[69227]: DEBUG nova.compute.claims [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 800.345505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.345760] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.348775] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b479ab2c-6d6c-4ba9-ba38-b571523da183 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.354378] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-812b8178-1c91-44bc-9557-b65f897b010b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.383818] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 
tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 800.438611] env[69227]: DEBUG oslo_vmware.rw_handles [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 800.499993] env[69227]: DEBUG oslo_vmware.rw_handles [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 800.500147] env[69227]: DEBUG oslo_vmware.rw_handles [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 801.193290] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f765837-4d61-4bac-a449-b8c4cf5e21af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.200692] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25256d9a-e912-44d0-8d3e-ba82c8fd801f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.230714] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67e1b67d-3ca1-4417-b19d-af8849bbbc39 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.240198] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c771a708-036d-4a45-b8ac-366ad4bab9c5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.253455] env[69227]: DEBUG nova.compute.provider_tree [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.756351] env[69227]: DEBUG nova.scheduler.client.report [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 801.759565] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.907770] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "a1095b15-f871-4dd2-9712-330d26ba4143" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.261849] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.916s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.262404] env[69227]: ERROR nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 802.262404] env[69227]: Faults: ['InvalidArgument'] [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] Traceback (most recent call last): [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self.driver.spawn(context, instance, image_meta, [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self._vmops.spawn(context, instance, image_meta, injected_files, [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self._fetch_image_if_missing(context, vi) [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] image_cache(vi, tmp_image_ds_loc) [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] vm_util.copy_virtual_disk( [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] session._wait_for_task(vmdk_copy_task) [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] return self.wait_for_task(task_ref) [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] return evt.wait() [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] result = hub.switch() [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] return self.greenlet.switch() [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] self.f(*self.args, **self.kw) [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] raise exceptions.translate_fault(task_info.error) [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] Faults: ['InvalidArgument'] [ 802.262404] env[69227]: ERROR nova.compute.manager [instance: 1547effe-8061-4aba-8e1f-302617eee198] [ 802.264342] env[69227]: DEBUG nova.compute.utils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] VimFaultException {{(pid=69227) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 802.264787] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Build of instance 1547effe-8061-4aba-8e1f-302617eee198 was re-scheduled: A specified parameter was not correct: fileType [ 802.264787] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 802.265149] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 802.265318] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 802.265472] env[69227]: DEBUG nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 802.265632] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 803.018171] env[69227]: DEBUG nova.network.neutron [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.521366] env[69227]: INFO nova.compute.manager [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] [instance: 1547effe-8061-4aba-8e1f-302617eee198] Took 1.26 seconds to deallocate network for instance. 
[ 803.532618] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 803.532933] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.039619] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.039619] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 804.039619] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 804.543163] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 31371445-428d-4236-a833-f07122553cfa] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 804.545028] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 804.545311] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.545311] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.545458] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.545603] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.546605] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.546605] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 804.546605] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 804.546605] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 805.049865] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.050808] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.051510] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.052234] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 805.054232] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d39be37-3f0a-46b7-91b1-ae4ff9fabfc8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.065330] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f34893-4149-49fe-a59d-043e54035a41 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.083248] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4241f8-c1d9-411b-9149-ae6e4b81c1b3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.090496] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8686f803-4f34-4fa6-b5df-aec5fa2bc892 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.125667] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180977MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 805.125667] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.125835] 
env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.561901] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c19d7464-9765-4215-841b-b906bd67f9a0 tempest-ServerDiagnosticsNegativeTest-1129529784 tempest-ServerDiagnosticsNegativeTest-1129529784-project-member] Lock "1547effe-8061-4aba-8e1f-302617eee198" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.324s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.562674] env[69227]: Traceback (most recent call last): [ 805.562722] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 805.562722] env[69227]: self.driver.spawn(context, instance, image_meta, [ 805.562722] env[69227]: File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 805.562722] env[69227]: self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.562722] env[69227]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 805.562722] env[69227]: self._fetch_image_if_missing(context, vi) [ 805.562722] env[69227]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 805.562722] env[69227]: image_cache(vi, tmp_image_ds_loc) [ 805.562722] env[69227]: File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 805.562722] env[69227]: vm_util.copy_virtual_disk( [ 805.562722] env[69227]: File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 805.562722] env[69227]: session._wait_for_task(vmdk_copy_task) [ 805.562722] env[69227]: File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 805.562722] env[69227]: return self.wait_for_task(task_ref) [ 805.562722] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 805.562722] env[69227]: return evt.wait() [ 805.562722] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 805.562722] env[69227]: result = hub.switch() [ 805.562722] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 805.562722] env[69227]: return self.greenlet.switch() [ 805.562722] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 805.562722] env[69227]: self.f(*self.args, **self.kw) [ 805.562722] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 805.562722] env[69227]: raise exceptions.translate_fault(task_info.error) [ 805.563450] env[69227]: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 805.563450] env[69227]: Faults: ['InvalidArgument'] [ 805.563450] env[69227]: During handling of the above exception, another exception occurred: [ 805.563450] env[69227]: Traceback (most recent call last): [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 805.563450] env[69227]: self._build_and_run_instance(context, 
instance, image, [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 805.563450] env[69227]: raise exception.RescheduledException( [ 805.563450] env[69227]: nova.exception.RescheduledException: Build of instance 1547effe-8061-4aba-8e1f-302617eee198 was re-scheduled: A specified parameter was not correct: fileType [ 805.563450] env[69227]: Faults: ['InvalidArgument'] [ 805.563450] env[69227]: During handling of the above exception, another exception occurred: [ 805.563450] env[69227]: Traceback (most recent call last): [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenpool.py", line 87, in _spawn_n_impl [ 805.563450] env[69227]: func(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 805.563450] env[69227]: return func(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 805.563450] env[69227]: return f(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 2347, in _locked_do_build_and_run_instance [ 805.563450] env[69227]: result = self._do_build_and_run_instance(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 805.563450] env[69227]: with excutils.save_and_reraise_exception(): [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.563450] env[69227]: self.force_reraise() [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.563450] env[69227]: raise self.value [ 805.563450] env[69227]: File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 805.563450] env[69227]: return f(self, context, *args, **kw) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 805.563450] env[69227]: with excutils.save_and_reraise_exception(): [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.563450] env[69227]: self.force_reraise() [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.563450] env[69227]: raise self.value [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 805.563450] env[69227]: return function(self, context, *args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 805.563450] env[69227]: return function(self, context, *args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 805.563450] env[69227]: return function(self, context, *args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/compute/manager.py", line 2491, in _do_build_and_run_instance [ 805.563450] env[69227]: instance.save() [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 209, in wrapper [ 805.563450] env[69227]: updates, result = self.indirection_api.object_action( [ 805.563450] env[69227]: File "/opt/stack/nova/nova/conductor/rpcapi.py", line 247, in object_action [ 805.563450] env[69227]: 
return cctxt.call(context, 'object_action', objinst=objinst, [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/client.py", line 190, in call [ 805.563450] env[69227]: result = self.transport._send( [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/transport.py", line 123, in _send [ 805.563450] env[69227]: return self._driver.send(target, ctxt, message, [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 800, in send [ 805.563450] env[69227]: return self._send(target, ctxt, message, wait_for_reply, timeout, [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/_drivers/amqpdriver.py", line 792, in _send [ 805.563450] env[69227]: raise result [ 805.563450] env[69227]: nova.exception_Remote.InstanceNotFound_Remote: Instance 1547effe-8061-4aba-8e1f-302617eee198 could not be found. [ 805.563450] env[69227]: Traceback (most recent call last): [ 805.563450] env[69227]: File "/opt/stack/nova/nova/conductor/manager.py", line 142, in _object_dispatch [ 805.563450] env[69227]: return getattr(target, method)(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_versionedobjects/base.py", line 226, in wrapper [ 805.563450] env[69227]: return fn(self, *args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/objects/instance.py", line 878, in save [ 805.563450] env[69227]: old_ref, inst_ref = db.instance_update_and_get_original( [ 805.563450] env[69227]: File "/opt/stack/nova/nova/db/utils.py", line 35, in wrapper [ 805.563450] env[69227]: return f(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 144, in wrapper [ 805.563450] env[69227]: with excutils.save_and_reraise_exception() as ectxt: [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 805.563450] env[69227]: self.force_reraise() [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 805.563450] env[69227]: raise self.value [ 805.563450] env[69227]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/api.py", line 142, in wrapper [ 805.563450] env[69227]: return f(*args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 207, in wrapper [ 805.563450] env[69227]: return f(context, *args, **kwargs) [ 805.563450] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 2283, in instance_update_and_get_original [ 805.563450] env[69227]: instance_ref = _instance_get_by_uuid(context, instance_uuid, [ 805.563450] env[69227]: File "/opt/stack/nova/nova/db/main/api.py", line 1405, in _instance_get_by_uuid [ 805.563450] env[69227]: raise exception.InstanceNotFound(instance_id=uuid) [ 805.563450] env[69227]: nova.exception.InstanceNotFound: Instance 1547effe-8061-4aba-8e1f-302617eee198 could not be found. [ 806.066411] env[69227]: DEBUG nova.compute.manager [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 806.145119] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "4005bdf5-3826-4214-9fa6-f794c4f043df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.165149] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 31371445-428d-4236-a833-f07122553cfa actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165149] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a1095b15-f871-4dd2-9712-330d26ba4143 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165149] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165149] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165149] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165388] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165418] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165551] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.165635] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 806.454609] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "334575bf-5847-41d5-85bd-e72f08a80a59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.588212] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.671328] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 11aaee2b-b47e-4078-9674-f46a5f7878ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 806.891868] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "02ec5165-3b99-4d81-a7d9-716e63076cb0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.172684] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4053c7e0-9f0d-4acf-90be-0dab69650838 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.677274] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1140bb76-ac01-4d31-996b-55e15f547497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 807.841201] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.179574] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7e10b03c-76c8-4ff7-9b66-c578cbe28f2e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 808.683137] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 10382ebb-37bc-4d8d-9555-442cb78e0555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.093203] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.188252] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 017c5882-2f2e-43e0-947a-6996bbdf73d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 809.691732] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 30ba5d34-7069-4cb5-8292-faa20327a662 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.194911] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 810.698605] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 81b5e27a-8113-49fc-a845-3160f3bfb030 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.204596] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3bfa20a9-e1b0-447f-8dcf-abb7bceee157 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.707771] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9e87431d-abde-4b1e-93a4-71d34e17308f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 811.871395] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.871395] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.910741] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.910741] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.212501] env[69227]: DEBUG 
nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4e6f82f5-9766-46fa-a28a-8f13c5c8fd58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 812.716059] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.219185] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4da35a64-ce89-4534-9af7-8eb8c1ec10ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 813.722537] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7fa6db1-6f80-4f30-84b1-6179b0774889 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.227103] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 877377d1-2c6c-4e43-b5db-5a4b6ceb99f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.731612] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0454511-5b02-4c32-b630-09215a79f7a7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 815.236851] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance bf04f0eb-5e79-4ddf-a654-24091deb7fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 815.741884] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 588397c9-19ac-4994-8c16-18c77be79411 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 816.246213] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 816.246474] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 816.246618] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 816.615266] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b99ab6-59bb-4b2a-9ebc-de183672c71c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.623668] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8231648c-c57e-4b3e-b423-483df3aa94f9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.653462] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6efca5b3-2965-485d-96e8-b2c4f38aafa6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.660928] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b3ff33c-01a6-41ca-941e-437b502c5033 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.673984] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.051892] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.054048] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.177628] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 817.684886] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 817.685193] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.559s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.685496] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.099s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.687175] env[69227]: INFO nova.compute.claims [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 818.192389] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.507s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.193402] env[69227]: DEBUG nova.compute.utils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Instance 11aaee2b-b47e-4078-9674-f46a5f7878ca could not be found. 
{{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 818.195745] env[69227]: DEBUG nova.compute.manager [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Instance disappeared during build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 818.195975] env[69227]: DEBUG nova.compute.manager [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 818.196234] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "refresh_cache-11aaee2b-b47e-4078-9674-f46a5f7878ca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.196381] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquired lock "refresh_cache-11aaee2b-b47e-4078-9674-f46a5f7878ca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.196548] env[69227]: DEBUG nova.network.neutron [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 818.701073] env[69227]: DEBUG nova.compute.utils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Can not refresh info_cache because instance was not found {{(pid=69227) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 818.729888] env[69227]: DEBUG nova.network.neutron [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.859267] env[69227]: DEBUG nova.network.neutron [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.363759] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Releasing lock "refresh_cache-11aaee2b-b47e-4078-9674-f46a5f7878ca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.364072] env[69227]: DEBUG nova.compute.manager [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 819.364267] env[69227]: DEBUG nova.compute.manager [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 819.364439] env[69227]: DEBUG nova.network.neutron [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 819.387602] env[69227]: DEBUG nova.network.neutron [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 819.891297] env[69227]: DEBUG nova.network.neutron [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.394618] env[69227]: INFO nova.compute.manager [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: 11aaee2b-b47e-4078-9674-f46a5f7878ca] Took 1.03 seconds to deallocate network for instance. 
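The lockutils entries above record how long each lock was waited on and held (for example, the resource tracker held "compute_resources" for 12.559s, during which the instance_claim path waited 11.099s). Below is a minimal, stdlib-only sketch for pulling those timings out of a log shaped like this excerpt; the regexes assume the exact 'acquired ... waited Ns' / '"released" ... held Ns' wording shown in these entries, and the script itself is illustrative rather than part of Nova.

#!/usr/bin/env python3
"""Sketch: summarise oslo.concurrency lock wait/hold times from a nova-compute
log shaped like the excerpt above. Adjust the patterns if your lockutils
log wording differs."""
import re
import sys
from collections import defaultdict

ACQUIRED = re.compile(r'Lock "(?P<name>[^"]+)" acquired by "(?P<target>[^"]+)"'
                      r' :: waited (?P<secs>[\d.]+)s')
RELEASED = re.compile(r'Lock "(?P<name>[^"]+)" "released" by "(?P<target>[^"]+)"'
                      r' :: held (?P<secs>[\d.]+)s')

def summarise(stream):
    waited = defaultdict(list)   # lock name -> wait times in seconds
    held = defaultdict(list)     # lock name -> hold times in seconds
    for line in stream:
        m = ACQUIRED.search(line)
        if m:
            waited[m.group('name')].append(float(m.group('secs')))
        m = RELEASED.search(line)
        if m:
            held[m.group('name')].append(float(m.group('secs')))
    return waited, held

if __name__ == '__main__':
    waited, held = summarise(sys.stdin)
    for name in sorted(set(waited) | set(held)):
        w, h = waited.get(name, []), held.get(name, [])
        print(f"{name}: max wait {max(w, default=0.0):.3f}s over {len(w)} acquires, "
              f"max hold {max(h, default=0.0):.3f}s over {len(h)} releases")

Fed this log on stdin (python3 lock_times.py < nova-compute.log, the filename being hypothetical), it would surface the 12.559s hold on "compute_resources" above and the multi-minute holds on the per-instance _locked_do_build_and_run_instance locks that follow.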
[ 821.413689] env[69227]: DEBUG oslo_concurrency.lockutils [None req-42c2d38e-108f-4423-a493-2b132bbf6e5d tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "11aaee2b-b47e-4078-9674-f46a5f7878ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.854s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.920197] env[69227]: DEBUG nova.compute.manager [None req-bd292eca-8d23-4d81-8924-ccb62a284456 tempest-ServerExternalEventsTest-1140727613 tempest-ServerExternalEventsTest-1140727613-project-member] [instance: 4053c7e0-9f0d-4acf-90be-0dab69650838] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 822.425462] env[69227]: DEBUG nova.compute.manager [None req-bd292eca-8d23-4d81-8924-ccb62a284456 tempest-ServerExternalEventsTest-1140727613 tempest-ServerExternalEventsTest-1140727613-project-member] [instance: 4053c7e0-9f0d-4acf-90be-0dab69650838] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 822.944615] env[69227]: DEBUG oslo_concurrency.lockutils [None req-bd292eca-8d23-4d81-8924-ccb62a284456 tempest-ServerExternalEventsTest-1140727613 tempest-ServerExternalEventsTest-1140727613-project-member] Lock "4053c7e0-9f0d-4acf-90be-0dab69650838" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.577s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 823.448541] env[69227]: DEBUG nova.compute.manager [None req-3bc9c384-cad6-4782-947d-19679b9ae4fa tempest-ServerActionsTestJSON-1367138755 tempest-ServerActionsTestJSON-1367138755-project-member] [instance: 1140bb76-ac01-4d31-996b-55e15f547497] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 823.530851] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.531806] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.952904] env[69227]: DEBUG nova.compute.manager [None req-3bc9c384-cad6-4782-947d-19679b9ae4fa tempest-ServerActionsTestJSON-1367138755 tempest-ServerActionsTestJSON-1367138755-project-member] [instance: 1140bb76-ac01-4d31-996b-55e15f547497] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 824.473457] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3bc9c384-cad6-4782-947d-19679b9ae4fa tempest-ServerActionsTestJSON-1367138755 tempest-ServerActionsTestJSON-1367138755-project-member] Lock "1140bb76-ac01-4d31-996b-55e15f547497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.565s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.979279] env[69227]: DEBUG nova.compute.manager [None req-c181bdac-9eca-4d29-8ade-7d1b1f3f1dfb tempest-VolumesAssistedSnapshotsTest-2024810052 tempest-VolumesAssistedSnapshotsTest-2024810052-project-member] [instance: 7e10b03c-76c8-4ff7-9b66-c578cbe28f2e] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 825.485410] env[69227]: DEBUG nova.compute.manager [None req-c181bdac-9eca-4d29-8ade-7d1b1f3f1dfb tempest-VolumesAssistedSnapshotsTest-2024810052 tempest-VolumesAssistedSnapshotsTest-2024810052-project-member] [instance: 7e10b03c-76c8-4ff7-9b66-c578cbe28f2e] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 826.008537] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c181bdac-9eca-4d29-8ade-7d1b1f3f1dfb tempest-VolumesAssistedSnapshotsTest-2024810052 tempest-VolumesAssistedSnapshotsTest-2024810052-project-member] Lock "7e10b03c-76c8-4ff7-9b66-c578cbe28f2e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.266s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.514160] env[69227]: DEBUG nova.compute.manager [None req-9ffc9bc4-e90a-4bdb-b734-78e441856755 tempest-ServersWithSpecificFlavorTestJSON-1959141027 tempest-ServersWithSpecificFlavorTestJSON-1959141027-project-member] [instance: 10382ebb-37bc-4d8d-9555-442cb78e0555] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 827.019450] env[69227]: DEBUG nova.compute.manager [None req-9ffc9bc4-e90a-4bdb-b734-78e441856755 tempest-ServersWithSpecificFlavorTestJSON-1959141027 tempest-ServersWithSpecificFlavorTestJSON-1959141027-project-member] [instance: 10382ebb-37bc-4d8d-9555-442cb78e0555] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 827.540050] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ffc9bc4-e90a-4bdb-b734-78e441856755 tempest-ServersWithSpecificFlavorTestJSON-1959141027 tempest-ServersWithSpecificFlavorTestJSON-1959141027-project-member] Lock "10382ebb-37bc-4d8d-9555-442cb78e0555" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.034s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.044731] env[69227]: DEBUG nova.compute.manager [None req-d4189b9c-4c6a-468f-a7ac-8e78849873a5 tempest-ImagesOneServerTestJSON-2014017566 tempest-ImagesOneServerTestJSON-2014017566-project-member] [instance: 017c5882-2f2e-43e0-947a-6996bbdf73d0] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 828.547469] env[69227]: DEBUG nova.compute.manager [None req-d4189b9c-4c6a-468f-a7ac-8e78849873a5 tempest-ImagesOneServerTestJSON-2014017566 tempest-ImagesOneServerTestJSON-2014017566-project-member] [instance: 017c5882-2f2e-43e0-947a-6996bbdf73d0] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 829.068066] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4189b9c-4c6a-468f-a7ac-8e78849873a5 tempest-ImagesOneServerTestJSON-2014017566 tempest-ImagesOneServerTestJSON-2014017566-project-member] Lock "017c5882-2f2e-43e0-947a-6996bbdf73d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.042s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.571677] env[69227]: DEBUG nova.compute.manager [None req-b6c4d79b-c7ab-49b9-8410-c803255d3715 tempest-ServersV294TestFqdnHostnames-639727780 tempest-ServersV294TestFqdnHostnames-639727780-project-member] [instance: 30ba5d34-7069-4cb5-8292-faa20327a662] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 830.077550] env[69227]: DEBUG nova.compute.manager [None req-b6c4d79b-c7ab-49b9-8410-c803255d3715 tempest-ServersV294TestFqdnHostnames-639727780 tempest-ServersV294TestFqdnHostnames-639727780-project-member] [instance: 30ba5d34-7069-4cb5-8292-faa20327a662] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 830.595500] env[69227]: DEBUG oslo_concurrency.lockutils [None req-b6c4d79b-c7ab-49b9-8410-c803255d3715 tempest-ServersV294TestFqdnHostnames-639727780 tempest-ServersV294TestFqdnHostnames-639727780-project-member] Lock "30ba5d34-7069-4cb5-8292-faa20327a662" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.413s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.104888] env[69227]: DEBUG nova.compute.manager [None req-a5d75b32-28b4-499b-8456-15c6d83d7cbb tempest-ServerMetadataTestJSON-2111490668 tempest-ServerMetadataTestJSON-2111490668-project-member] [instance: 8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 831.619789] env[69227]: DEBUG nova.compute.manager [None req-a5d75b32-28b4-499b-8456-15c6d83d7cbb tempest-ServerMetadataTestJSON-2111490668 tempest-ServerMetadataTestJSON-2111490668-project-member] [instance: 8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 832.080021] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d0a31355-d650-4585-8583-e47dce9b1c9c tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Acquiring lock "0b79868a-be93-4c85-bac0-4167c4ea9b2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.080021] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d0a31355-d650-4585-8583-e47dce9b1c9c tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Lock "0b79868a-be93-4c85-bac0-4167c4ea9b2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.134665] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a5d75b32-28b4-499b-8456-15c6d83d7cbb tempest-ServerMetadataTestJSON-2111490668 tempest-ServerMetadataTestJSON-2111490668-project-member] Lock "8cd8f52e-5df8-4f5b-b59b-5d7fd37d0638" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.574s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.641474] env[69227]: DEBUG nova.compute.manager [None req-a696b0a8-4b00-4d1b-8fec-ef5c3373d5d4 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] [instance: 81b5e27a-8113-49fc-a845-3160f3bfb030] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 833.148217] env[69227]: DEBUG nova.compute.manager [None req-a696b0a8-4b00-4d1b-8fec-ef5c3373d5d4 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] [instance: 81b5e27a-8113-49fc-a845-3160f3bfb030] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 833.667576] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a696b0a8-4b00-4d1b-8fec-ef5c3373d5d4 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Lock "81b5e27a-8113-49fc-a845-3160f3bfb030" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.501s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.172064] env[69227]: DEBUG nova.compute.manager [None req-fcc98451-2c17-453c-aaa7-dc512b250cf8 tempest-InstanceActionsNegativeTestJSON-677229811 tempest-InstanceActionsNegativeTestJSON-677229811-project-member] [instance: 3bfa20a9-e1b0-447f-8dcf-abb7bceee157] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 834.683304] env[69227]: DEBUG nova.compute.manager [None req-fcc98451-2c17-453c-aaa7-dc512b250cf8 tempest-InstanceActionsNegativeTestJSON-677229811 tempest-InstanceActionsNegativeTestJSON-677229811-project-member] [instance: 3bfa20a9-e1b0-447f-8dcf-abb7bceee157] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.206871] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fcc98451-2c17-453c-aaa7-dc512b250cf8 tempest-InstanceActionsNegativeTestJSON-677229811 tempest-InstanceActionsNegativeTestJSON-677229811-project-member] Lock "3bfa20a9-e1b0-447f-8dcf-abb7bceee157" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.028s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.712498] env[69227]: DEBUG nova.compute.manager [None req-9892da88-9748-49e9-89af-c5e7c02eb2a3 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] [instance: 9e87431d-abde-4b1e-93a4-71d34e17308f] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 836.222087] env[69227]: DEBUG nova.compute.manager [None req-9892da88-9748-49e9-89af-c5e7c02eb2a3 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] [instance: 9e87431d-abde-4b1e-93a4-71d34e17308f] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 836.738781] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9892da88-9748-49e9-89af-c5e7c02eb2a3 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Lock "9e87431d-abde-4b1e-93a4-71d34e17308f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.464s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.739401] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8639c8ce-4b8f-4dd6-b267-2fa392fc0f70 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Acquiring lock "74713144-66f6-4513-bac5-379f4a1b1cd1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.739636] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8639c8ce-4b8f-4dd6-b267-2fa392fc0f70 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Lock "74713144-66f6-4513-bac5-379f4a1b1cd1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 837.244166] env[69227]: DEBUG nova.compute.manager [None req-ac0b5017-2849-49f5-82b6-9c90191368d3 tempest-ServersAdmin275Test-2002803998 tempest-ServersAdmin275Test-2002803998-project-member] [instance: 4e6f82f5-9766-46fa-a28a-8f13c5c8fd58] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 837.750133] env[69227]: DEBUG nova.compute.manager [None req-ac0b5017-2849-49f5-82b6-9c90191368d3 tempest-ServersAdmin275Test-2002803998 tempest-ServersAdmin275Test-2002803998-project-member] [instance: 4e6f82f5-9766-46fa-a28a-8f13c5c8fd58] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 838.267528] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ac0b5017-2849-49f5-82b6-9c90191368d3 tempest-ServersAdmin275Test-2002803998 tempest-ServersAdmin275Test-2002803998-project-member] Lock "4e6f82f5-9766-46fa-a28a-8f13c5c8fd58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.663s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.771484] env[69227]: DEBUG nova.compute.manager [None req-e18e8e61-821e-4faa-86c1-8fe94fbe2f05 tempest-AttachInterfacesUnderV243Test-1792428759 tempest-AttachInterfacesUnderV243Test-1792428759-project-member] [instance: 4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 839.282646] env[69227]: DEBUG nova.compute.manager [None req-e18e8e61-821e-4faa-86c1-8fe94fbe2f05 tempest-AttachInterfacesUnderV243Test-1792428759 tempest-AttachInterfacesUnderV243Test-1792428759-project-member] [instance: 4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 839.803942] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e18e8e61-821e-4faa-86c1-8fe94fbe2f05 tempest-AttachInterfacesUnderV243Test-1792428759 tempest-AttachInterfacesUnderV243Test-1792428759-project-member] Lock "4eecd6a7-efe1-42bd-8cb9-6cd6116c1c55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.756s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.306423] env[69227]: DEBUG nova.compute.manager [None req-d213183b-15c8-4a78-b86b-4e5f65ff7c53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 4da35a64-ce89-4534-9af7-8eb8c1ec10ba] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 840.813043] env[69227]: DEBUG nova.compute.manager [None req-d213183b-15c8-4a78-b86b-4e5f65ff7c53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 4da35a64-ce89-4534-9af7-8eb8c1ec10ba] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.040794] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7ca913cd-0ae0-4f4c-80fd-8c3d4d7ad6e6 tempest-ServersNegativeTestMultiTenantJSON-1615902621 tempest-ServersNegativeTestMultiTenantJSON-1615902621-project-member] Acquiring lock "43ec99d7-fc56-493f-b845-710027a320c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.041053] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7ca913cd-0ae0-4f4c-80fd-8c3d4d7ad6e6 tempest-ServersNegativeTestMultiTenantJSON-1615902621 tempest-ServersNegativeTestMultiTenantJSON-1615902621-project-member] Lock "43ec99d7-fc56-493f-b845-710027a320c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.162225] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2e055c0f-8bcf-439d-83f7-a2736d4e10ed tempest-ImagesOneServerNegativeTestJSON-1775796980 tempest-ImagesOneServerNegativeTestJSON-1775796980-project-member] Acquiring lock "5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.162225] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2e055c0f-8bcf-439d-83f7-a2736d4e10ed tempest-ImagesOneServerNegativeTestJSON-1775796980 tempest-ImagesOneServerNegativeTestJSON-1775796980-project-member] Lock "5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.336898] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d213183b-15c8-4a78-b86b-4e5f65ff7c53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "4da35a64-ce89-4534-9af7-8eb8c1ec10ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.407s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.840078] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 842.160640] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ef622b4f-5fd9-456f-8c71-8d774207061f tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "de21ad29-1e75-44b6-b1d8-ba0e702a7fe2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.160640] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ef622b4f-5fd9-456f-8c71-8d774207061f tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "de21ad29-1e75-44b6-b1d8-ba0e702a7fe2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.365907] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.365907] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.367210] env[69227]: INFO nova.compute.claims [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 843.793417] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-231e0d1c-5208-4d5f-bdb6-01c320a69b83 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.801305] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d81be46-40bb-4f54-8ccb-c1d6276fcd05 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.837723] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7488dd79-0c9b-4475-a37c-0f0e479ecff5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.845643] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19fb9fe-3e8f-44cd-a238-050e0407b80b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.862029] env[69227]: DEBUG nova.compute.provider_tree [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 
tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.998650] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ca97291b-90aa-4a97-b6ae-f19979fc7637 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "e77cfa8f-4678-4fa0-9cc8-750895c85013" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.998866] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ca97291b-90aa-4a97-b6ae-f19979fc7637 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "e77cfa8f-4678-4fa0-9cc8-750895c85013" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.367072] env[69227]: DEBUG nova.scheduler.client.report [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 844.873700] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.874239] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 845.379248] env[69227]: DEBUG nova.compute.utils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 845.380772] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 845.380944] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 845.437113] env[69227]: DEBUG nova.policy [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ad3f38f58d74472086fcab69edb72f6e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e4e3c9a511a04197af3f5178c4918274', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 845.884239] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 845.993348] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Successfully created port: d44a62f2-69e6-4c73-9f83-a6baffd088db {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 846.899321] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 846.924852] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 846.925345] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 846.925644] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 846.925907] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 846.927496] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 846.927677] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 846.927915] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 846.928185] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 846.928340] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 846.928507] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 846.928677] env[69227]: DEBUG nova.virt.hardware [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 846.929703] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce13454-4cba-42f3-901f-c0d219ac5ba3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.938204] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fddb41-3579-4b56-b0ea-c2fe1005e8df {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.202297] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "a7fa6db1-6f80-4f30-84b1-6179b0774889" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.521820] env[69227]: DEBUG oslo_concurrency.lockutils [None req-b6317fd3-ac53-462d-a029-9b11675c33f9 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "e04f79e6-e7af-4d94-aed0-3f6aecbd8806" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.522100] env[69227]: DEBUG oslo_concurrency.lockutils [None req-b6317fd3-ac53-462d-a029-9b11675c33f9 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "e04f79e6-e7af-4d94-aed0-3f6aecbd8806" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.750573] env[69227]: DEBUG nova.compute.manager [req-6c52a0a6-9a59-4a12-b611-8846de1a9e15 req-4f9935f0-f9ac-44d1-a459-ebd2a9236530 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Received event network-vif-plugged-d44a62f2-69e6-4c73-9f83-a6baffd088db {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 847.750837] env[69227]: DEBUG oslo_concurrency.lockutils [req-6c52a0a6-9a59-4a12-b611-8846de1a9e15 
req-4f9935f0-f9ac-44d1-a459-ebd2a9236530 service nova] Acquiring lock "a7fa6db1-6f80-4f30-84b1-6179b0774889-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.751069] env[69227]: DEBUG oslo_concurrency.lockutils [req-6c52a0a6-9a59-4a12-b611-8846de1a9e15 req-4f9935f0-f9ac-44d1-a459-ebd2a9236530 service nova] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.751257] env[69227]: DEBUG oslo_concurrency.lockutils [req-6c52a0a6-9a59-4a12-b611-8846de1a9e15 req-4f9935f0-f9ac-44d1-a459-ebd2a9236530 service nova] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.751437] env[69227]: DEBUG nova.compute.manager [req-6c52a0a6-9a59-4a12-b611-8846de1a9e15 req-4f9935f0-f9ac-44d1-a459-ebd2a9236530 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] No waiting events found dispatching network-vif-plugged-d44a62f2-69e6-4c73-9f83-a6baffd088db {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 847.751818] env[69227]: WARNING nova.compute.manager [req-6c52a0a6-9a59-4a12-b611-8846de1a9e15 req-4f9935f0-f9ac-44d1-a459-ebd2a9236530 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Received unexpected event network-vif-plugged-d44a62f2-69e6-4c73-9f83-a6baffd088db for instance with vm_state building and task_state deleting. 
[ 847.827411] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Successfully updated port: d44a62f2-69e6-4c73-9f83-a6baffd088db {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.330277] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.330534] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquired lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.330577] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 848.884782] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 849.072061] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Updating instance_info_cache with network_info: [{"id": "d44a62f2-69e6-4c73-9f83-a6baffd088db", "address": "fa:16:3e:3c:63:95", "network": {"id": "3c939876-a074-4979-8b85-384fded1149c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-31580647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4e3c9a511a04197af3f5178c4918274", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd44a62f2-69", "ovs_interfaceid": "d44a62f2-69e6-4c73-9f83-a6baffd088db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.172781] env[69227]: WARNING oslo_vmware.rw_handles [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 849.172781] env[69227]: ERROR oslo_vmware.rw_handles [ 849.173246] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to 
vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 849.175471] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 849.175734] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Copying Virtual Disk [datastore2] vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/b645f137-9d48-47d0-baec-ebf543aa3d2e/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 849.176027] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe7e5d69-776b-45cd-bd2a-4c0dac3e9ddc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.185503] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Waiting for the task: (returnval){ [ 849.185503] env[69227]: value = "task-3475016" [ 849.185503] env[69227]: _type = "Task" [ 849.185503] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.194229] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Task: {'id': task-3475016, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.574677] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Releasing lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.575028] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance network_info: |[{"id": "d44a62f2-69e6-4c73-9f83-a6baffd088db", "address": "fa:16:3e:3c:63:95", "network": {"id": "3c939876-a074-4979-8b85-384fded1149c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-31580647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4e3c9a511a04197af3f5178c4918274", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd44a62f2-69", "ovs_interfaceid": "d44a62f2-69e6-4c73-9f83-a6baffd088db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 849.575455] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:63:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd44a62f2-69e6-4c73-9f83-a6baffd088db', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 849.583166] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Creating folder: Project (e4e3c9a511a04197af3f5178c4918274). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.583446] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e9bf14c-1b8a-4f8d-897f-fa1ff7ce9a9e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.595602] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Created folder: Project (e4e3c9a511a04197af3f5178c4918274) in parent group-v694623. [ 849.595906] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Creating folder: Instances. Parent ref: group-v694663. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 849.596234] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ee7f74a-e8dd-499a-b630-7fc7e2ddd049 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.605510] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Created folder: Instances in parent group-v694663. [ 849.605754] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 849.605954] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 849.606165] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5f77316-4c9f-4480-9aff-ea6bd18908b9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.630355] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 849.630355] env[69227]: value = "task-3475019" [ 849.630355] env[69227]: _type = "Task" [ 849.630355] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.641023] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475019, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.699023] env[69227]: DEBUG oslo_vmware.exceptions [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 849.702272] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.703225] env[69227]: ERROR nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 849.703225] env[69227]: Faults: ['InvalidArgument'] [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] Traceback (most recent call last): [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] yield resources [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self.driver.spawn(context, instance, image_meta, [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self._fetch_image_if_missing(context, vi) [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] image_cache(vi, tmp_image_ds_loc) [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] vm_util.copy_virtual_disk( [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] session._wait_for_task(vmdk_copy_task) [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] return self.wait_for_task(task_ref) [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] return evt.wait() [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] result = hub.switch() [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] return self.greenlet.switch() [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self.f(*self.args, **self.kw) [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] raise exceptions.translate_fault(task_info.error) [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] Faults: ['InvalidArgument'] [ 849.703225] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] [ 849.703225] env[69227]: INFO nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Terminating instance [ 849.704818] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.704967] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.705289] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d7d848c-dd7d-4692-a110-bac35adde05e {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.708749] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 849.709421] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 849.709715] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc901a3-68e3-4016-af3c-2bc2c7388cbb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.717159] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 849.717401] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ed594683-9e7a-47db-98ea-7a9b5600fee5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.719976] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.720158] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 849.721142] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7a45433-0624-460a-8efc-349282317a19 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.726362] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 849.726362] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52673dd7-9d36-4767-0a18-5a94190fdc1b" [ 849.726362] env[69227]: _type = "Task" [ 849.726362] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.733352] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52673dd7-9d36-4767-0a18-5a94190fdc1b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.786506] env[69227]: DEBUG nova.compute.manager [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Received event network-changed-d44a62f2-69e6-4c73-9f83-a6baffd088db {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 849.786506] env[69227]: DEBUG nova.compute.manager [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Refreshing instance network info cache due to event network-changed-d44a62f2-69e6-4c73-9f83-a6baffd088db. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 849.786506] env[69227]: DEBUG oslo_concurrency.lockutils [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] Acquiring lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.786506] env[69227]: DEBUG oslo_concurrency.lockutils [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] Acquired lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.786506] env[69227]: DEBUG nova.network.neutron [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Refreshing network info cache for port d44a62f2-69e6-4c73-9f83-a6baffd088db {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 849.848052] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 849.848052] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 849.848052] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Deleting the datastore file [datastore2] 31371445-428d-4236-a833-f07122553cfa {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 849.848508] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed40e047-d449-4710-ae2f-f73165c7879a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.857084] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Waiting for the task: (returnval){ [ 849.857084] env[69227]: value = "task-3475022" [ 849.857084] env[69227]: _type = "Task" [ 849.857084] 
env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.867460] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Task: {'id': task-3475022, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.141081] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475019, 'name': CreateVM_Task, 'duration_secs': 0.308409} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.141318] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 850.142463] env[69227]: DEBUG oslo_vmware.service [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f95b35-32f6-4ba5-bcdb-1e9608151626 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.148258] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.148429] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.148808] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 850.149088] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c547fdb9-79ed-4ebf-a077-dbd2f06a66eb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.154160] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Waiting for the task: (returnval){ [ 850.154160] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5262eb54-0c91-d23b-b120-08ecacd98487" [ 850.154160] env[69227]: _type = "Task" [ 850.154160] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.168702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.168956] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 850.169203] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.169352] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.169528] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.169773] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88eebdb5-1216-4b19-87fc-f993f70d8ac8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.185410] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.185612] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 850.186682] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafbdc47-539d-4959-8551-1a4761d8d6f5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.193086] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b90d92ba-404a-4046-9921-f99ac5c45209 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.197909] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Waiting for the task: (returnval){ [ 850.197909] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5203bd84-2e8c-6e75-d237-98dbed4d0cda" [ 850.197909] env[69227]: _type = "Task" [ 850.197909] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.207105] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5203bd84-2e8c-6e75-d237-98dbed4d0cda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.236036] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 850.236386] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating directory with path [datastore2] vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.236609] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d15ac10f-9aaf-4847-9708-7455e3438bbf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.247893] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Created directory with path [datastore2] vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.248186] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Fetch image to [datastore2] vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 850.248369] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 850.249108] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e52af2c-fe0c-445a-8aea-95691430b9a1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.256052] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dadc4d8-2929-4888-beb3-304a1618d15e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.270880] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23687a87-28ce-410b-aa66-abfb94e0133c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.308029] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162dc4b3-6412-43f9-a535-862cb730015b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.314571] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c868d304-0456-4259-8afb-6f85022b3be5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.335024] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 850.372253] env[69227]: DEBUG oslo_vmware.api [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Task: {'id': task-3475022, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105543} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.372640] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 850.372710] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 850.372842] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 850.373052] env[69227]: INFO nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Took 0.66 seconds to destroy the instance on the hypervisor. [ 850.377811] env[69227]: DEBUG nova.compute.claims [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 850.377986] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.378315] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 850.389759] env[69227]: DEBUG oslo_vmware.rw_handles [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 850.445936] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.449368] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.450689] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.450842] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11243}} [ 850.452813] env[69227]: DEBUG oslo_vmware.rw_handles [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 850.452986] env[69227]: DEBUG oslo_vmware.rw_handles [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 850.708753] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 850.708753] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Creating directory with path [datastore1] vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.708753] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f93fa59c-6a53-4abb-bea6-166118b19f16 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.732843] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Created directory with path [datastore1] vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.732974] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Fetch image to [datastore1] vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 850.733159] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore1] vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore1 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 850.733932] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e962fd42-8ba2-46b9-950b-5e4687967a75 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.741307] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bc1f8c-b594-489a-82bf-69e9a025c705 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.750758] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc326d1-16f1-49c2-9803-84ed9981a6be {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.786317] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c990680b-18ed-45da-9b07-6ce965e56033 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.792391] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-362c6d42-9900-4980-ae1d-f59569f2ee8c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.820810] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore1 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 850.886473] env[69227]: DEBUG oslo_vmware.rw_handles [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 850.954491] env[69227]: DEBUG nova.network.neutron [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Updated VIF entry in instance network info cache for port d44a62f2-69e6-4c73-9f83-a6baffd088db. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 850.954971] env[69227]: DEBUG nova.network.neutron [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Updating instance_info_cache with network_info: [{"id": "d44a62f2-69e6-4c73-9f83-a6baffd088db", "address": "fa:16:3e:3c:63:95", "network": {"id": "3c939876-a074-4979-8b85-384fded1149c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-31580647-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e4e3c9a511a04197af3f5178c4918274", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd44a62f2-69", "ovs_interfaceid": "d44a62f2-69e6-4c73-9f83-a6baffd088db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.960385] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] There are 0 instances to clean {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11252}} [ 850.962778] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 850.962778] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances with incomplete migration {{(pid=69227) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11281}} [ 850.962778] env[69227]: DEBUG oslo_vmware.rw_handles [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 850.962778] env[69227]: DEBUG oslo_vmware.rw_handles [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 851.342124] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7777c4b-1f0e-4f49-afe3-e288d05404ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.352621] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49badb85-78e3-4107-9525-72f9b85b734f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.385261] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93badf92-7d98-4098-8c66-9be0b0a6ed09 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.394197] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1a174d-c845-4c08-9c58-b08355cd965d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.407098] env[69227]: DEBUG nova.compute.provider_tree [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.462357] env[69227]: DEBUG oslo_concurrency.lockutils [req-e69ac885-3ec0-44b7-8bd3-2f76d7d2b7e7 req-89d34fcb-1ab5-4003-874a-91e099a75d44 service nova] Releasing lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.467719] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 851.910120] env[69227]: DEBUG nova.scheduler.client.report [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 852.416561] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.038s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.417068] env[69227]: ERROR nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 
tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 852.417068] env[69227]: Faults: ['InvalidArgument'] [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] Traceback (most recent call last): [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self.driver.spawn(context, instance, image_meta, [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self._vmops.spawn(context, instance, image_meta, injected_files, [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self._fetch_image_if_missing(context, vi) [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] image_cache(vi, tmp_image_ds_loc) [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] vm_util.copy_virtual_disk( [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] session._wait_for_task(vmdk_copy_task) [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] return self.wait_for_task(task_ref) [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] return evt.wait() [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] result = hub.switch() [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in 
switch [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] return self.greenlet.switch() [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] self.f(*self.args, **self.kw) [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] raise exceptions.translate_fault(task_info.error) [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] Faults: ['InvalidArgument'] [ 852.417068] env[69227]: ERROR nova.compute.manager [instance: 31371445-428d-4236-a833-f07122553cfa] [ 852.418055] env[69227]: DEBUG nova.compute.utils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 852.419924] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Build of instance 31371445-428d-4236-a833-f07122553cfa was re-scheduled: A specified parameter was not correct: fileType [ 852.419924] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 852.420317] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 852.420517] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 852.420786] env[69227]: DEBUG nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 852.420852] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 853.872561] env[69227]: DEBUG nova.network.neutron [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.946428] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 853.946428] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 853.946997] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 854.375734] env[69227]: INFO nova.compute.manager [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Took 1.95 seconds to deallocate network for instance. 
[ 854.450070] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.450608] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.450699] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.450827] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 854.451723] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e9c6f79-8adf-4e08-a15c-ea88b98d7a72 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.460824] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a4fc5f-d2df-49c7-a590-e752a852d2bf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.474894] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f844583f-019c-4882-b1c1-91dea7b5fc07 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.483893] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e000f8a-4706-4c90-857b-febc272ec4ba {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.516564] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180952MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 854.516723] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.516945] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.401191] env[69227]: INFO 
nova.scheduler.client.report [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Deleted allocations for instance 31371445-428d-4236-a833-f07122553cfa [ 855.544267] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a1095b15-f871-4dd2-9712-330d26ba4143 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.544436] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.544563] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.544682] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.544895] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.546096] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.546096] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.546096] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.546096] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7fa6db1-6f80-4f30-84b1-6179b0774889 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 855.679227] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5082fdd3-3b3d-4bd8-86ff-8500608e97ab tempest-ServerActionsV293TestJSON-851954569 tempest-ServerActionsV293TestJSON-851954569-project-member] Acquiring lock "940ca4b4-783b-4527-8559-d00d9e48fd05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.679451] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5082fdd3-3b3d-4bd8-86ff-8500608e97ab tempest-ServerActionsV293TestJSON-851954569 tempest-ServerActionsV293TestJSON-851954569-project-member] Lock "940ca4b4-783b-4527-8559-d00d9e48fd05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.910291] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7100ec18-6c6f-4439-b519-c01940c842a4 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "31371445-428d-4236-a833-f07122553cfa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 257.642s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.914274] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "31371445-428d-4236-a833-f07122553cfa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 61.268s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.914274] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "31371445-428d-4236-a833-f07122553cfa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 855.914274] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "31371445-428d-4236-a833-f07122553cfa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.914274] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock 
"31371445-428d-4236-a833-f07122553cfa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.914576] env[69227]: INFO nova.compute.manager [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Terminating instance [ 855.916380] env[69227]: DEBUG nova.compute.manager [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 855.916380] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 855.916522] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40bf52f3-61f8-460e-bbb8-d16975fb88bb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.925203] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9854a64a-d15f-4a96-b16f-981fd8166a82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.959896] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 31371445-428d-4236-a833-f07122553cfa could not be found. [ 855.960130] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.960314] env[69227]: INFO nova.compute.manager [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 31371445-428d-4236-a833-f07122553cfa] Took 0.04 seconds to destroy the instance on the hypervisor. [ 855.960574] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.960791] env[69227]: DEBUG nova.compute.manager [-] [instance: 31371445-428d-4236-a833-f07122553cfa] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 855.960886] env[69227]: DEBUG nova.network.neutron [-] [instance: 31371445-428d-4236-a833-f07122553cfa] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 856.049774] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance bf04f0eb-5e79-4ddf-a654-24091deb7fbb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.415589] env[69227]: DEBUG nova.compute.manager [None req-00d873b0-15de-4ba1-ac33-803e1497fe7e tempest-ServersTestJSON-1429325674 tempest-ServersTestJSON-1429325674-project-member] [instance: 877377d1-2c6c-4e43-b5db-5a4b6ceb99f7] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 856.492624] env[69227]: DEBUG nova.network.neutron [-] [instance: 31371445-428d-4236-a833-f07122553cfa] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.552645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 588397c9-19ac-4994-8c16-18c77be79411 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 856.919757] env[69227]: DEBUG nova.compute.manager [None req-00d873b0-15de-4ba1-ac33-803e1497fe7e tempest-ServersTestJSON-1429325674 tempest-ServersTestJSON-1429325674-project-member] [instance: 877377d1-2c6c-4e43-b5db-5a4b6ceb99f7] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 856.995179] env[69227]: INFO nova.compute.manager [-] [instance: 31371445-428d-4236-a833-f07122553cfa] Took 1.03 seconds to deallocate network for instance. [ 857.056245] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.435736] env[69227]: DEBUG oslo_concurrency.lockutils [None req-00d873b0-15de-4ba1-ac33-803e1497fe7e tempest-ServersTestJSON-1429325674 tempest-ServersTestJSON-1429325674-project-member] Lock "877377d1-2c6c-4e43-b5db-5a4b6ceb99f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.606s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.560044] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 857.938801] env[69227]: DEBUG nova.compute.manager [None req-9ab8f215-0de5-4b62-abbe-c7ab32e389fb tempest-FloatingIPsAssociationNegativeTestJSON-442599654 tempest-FloatingIPsAssociationNegativeTestJSON-442599654-project-member] [instance: f0454511-5b02-4c32-b630-09215a79f7a7] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 858.021852] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e0791052-6e5c-4c4e-811c-32d5711dba36 tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "31371445-428d-4236-a833-f07122553cfa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.110s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.062582] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 858.066505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-485a3965-3038-4d9a-85ce-811160e1b584 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "4f6ce1c9-4790-4901-8462-1b24f52ef54e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.066505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-485a3965-3038-4d9a-85ce-811160e1b584 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "4f6ce1c9-4790-4901-8462-1b24f52ef54e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.443281] env[69227]: DEBUG nova.compute.manager [None req-9ab8f215-0de5-4b62-abbe-c7ab32e389fb tempest-FloatingIPsAssociationNegativeTestJSON-442599654 tempest-FloatingIPsAssociationNegativeTestJSON-442599654-project-member] [instance: f0454511-5b02-4c32-b630-09215a79f7a7] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 858.565772] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 858.652954] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e4ea5218-d305-4ebc-a4b8-f5eea393074c tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Acquiring lock "532fb3f7-f71e-4906-bf1a-c15f9762c04a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.652954] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e4ea5218-d305-4ebc-a4b8-f5eea393074c tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "532fb3f7-f71e-4906-bf1a-c15f9762c04a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.956357] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9ab8f215-0de5-4b62-abbe-c7ab32e389fb tempest-FloatingIPsAssociationNegativeTestJSON-442599654 tempest-FloatingIPsAssociationNegativeTestJSON-442599654-project-member] Lock "f0454511-5b02-4c32-b630-09215a79f7a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.297s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.069802] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 859.458932] env[69227]: DEBUG nova.compute.manager [None req-e64cbcd7-b590-4874-bd80-0b7a04001fd8 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: bf04f0eb-5e79-4ddf-a654-24091deb7fbb] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 859.571951] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b79868a-be93-4c85-bac0-4167c4ea9b2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 859.964173] env[69227]: DEBUG nova.compute.manager [None req-e64cbcd7-b590-4874-bd80-0b7a04001fd8 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: bf04f0eb-5e79-4ddf-a654-24091deb7fbb] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 860.075190] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 74713144-66f6-4513-bac5-379f4a1b1cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.478578] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e64cbcd7-b590-4874-bd80-0b7a04001fd8 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "bf04f0eb-5e79-4ddf-a654-24091deb7fbb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.898s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.578197] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43ec99d7-fc56-493f-b845-710027a320c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 860.981474] env[69227]: DEBUG nova.compute.manager [None req-9070ddb7-33cc-419f-b1f5-fb092a6efd08 tempest-TenantUsagesTestJSON-1708237157 tempest-TenantUsagesTestJSON-1708237157-project-member] [instance: 588397c9-19ac-4994-8c16-18c77be79411] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.081098] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 861.486685] env[69227]: DEBUG nova.compute.manager [None req-9070ddb7-33cc-419f-b1f5-fb092a6efd08 tempest-TenantUsagesTestJSON-1708237157 tempest-TenantUsagesTestJSON-1708237157-project-member] [instance: 588397c9-19ac-4994-8c16-18c77be79411] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 861.583772] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance de21ad29-1e75-44b6-b1d8-ba0e702a7fe2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.088596] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e77cfa8f-4678-4fa0-9cc8-750895c85013 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.095027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9070ddb7-33cc-419f-b1f5-fb092a6efd08 tempest-TenantUsagesTestJSON-1708237157 tempest-TenantUsagesTestJSON-1708237157-project-member] Lock "588397c9-19ac-4994-8c16-18c77be79411" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.515s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.591999] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e04f79e6-e7af-4d94-aed0-3f6aecbd8806 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 862.598777] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 863.098942] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 940ca4b4-783b-4527-8559-d00d9e48fd05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 863.099242] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 863.099393] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 863.122177] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing inventories for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 863.128622] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.139238] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating ProviderTree inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 863.139456] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 863.151212] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing aggregate associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, aggregates: None {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 863.169108] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing trait associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 
863.505122] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4528bc6-d39f-40ee-9783-326c7ca5bd7f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.514172] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb492fbb-8bcf-48af-ac78-d5145811764e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.543118] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5cb304-a488-467a-ae05-4a6e3d1a67f6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.550302] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c639da-99e4-4369-9454-4d32d3640221 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.563415] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.066746] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 864.572154] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 864.573081] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.055s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.573788] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.445s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.576025] env[69227]: INFO nova.compute.claims [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.184288] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1120a5de-aae7-4d20-99db-888b38ad375a 
tempest-InstanceActionsV221TestJSON-1168477305 tempest-InstanceActionsV221TestJSON-1168477305-project-member] Acquiring lock "cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.184552] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1120a5de-aae7-4d20-99db-888b38ad375a tempest-InstanceActionsV221TestJSON-1168477305 tempest-InstanceActionsV221TestJSON-1168477305-project-member] Lock "cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.912177] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffc6ad46-75fd-48b6-b941-fee537052781 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.920234] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1d1ae4-4d1d-468f-a27d-b642fc9626eb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.959239] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9da7a8-0f64-4c5a-b072-f205bacbd0af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.967215] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fbf3b1-7c6f-4157-804a-9f2b11ff7862 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.983068] env[69227]: DEBUG nova.compute.provider_tree [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.054173] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.054284] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.054649] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 866.054649] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 866.487555] env[69227]: DEBUG nova.scheduler.client.report [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 
tempest-ServerTagsTestJSON-667446649-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 866.560974] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561145] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561273] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561399] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561520] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561641] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561760] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561876] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.561991] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.562118] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 866.562239] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 866.562428] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.562587] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.562729] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.001365] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.428s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.001895] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 867.506782] env[69227]: DEBUG nova.compute.utils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 867.508060] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 867.508291] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 867.554371] env[69227]: DEBUG nova.policy [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3096162f7e974b7ca64de744d8da94e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ad515c4e09a4c10948ddf23862039d9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 867.843988] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Successfully created port: ec99ae33-7bc2-4485-8934-c7054ce71555 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.012136] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 869.021352] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 869.045565] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 869.045755] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 869.045914] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.046108] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 869.046258] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.046889] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 869.047197] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 869.047376] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 869.047547] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 
tempest-ServerTagsTestJSON-667446649-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 869.047709] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 869.047879] env[69227]: DEBUG nova.virt.hardware [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.048771] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104c4f7e-b2d6-443c-9b1a-11956815f30e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.056945] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8be0207-d899-4247-bb5f-9bf98f585b31 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.276380] env[69227]: DEBUG nova.compute.manager [req-2740701f-022a-4778-b06e-a2f9986c6274 req-8a86f6b5-7e71-4eb3-912e-453e833fbf03 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Received event network-vif-plugged-ec99ae33-7bc2-4485-8934-c7054ce71555 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 869.276380] env[69227]: DEBUG oslo_concurrency.lockutils [req-2740701f-022a-4778-b06e-a2f9986c6274 req-8a86f6b5-7e71-4eb3-912e-453e833fbf03 service nova] Acquiring lock "be8dae7e-b829-455a-b8d3-73fb04c40128-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.276380] env[69227]: DEBUG oslo_concurrency.lockutils [req-2740701f-022a-4778-b06e-a2f9986c6274 req-8a86f6b5-7e71-4eb3-912e-453e833fbf03 service nova] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.276380] env[69227]: DEBUG oslo_concurrency.lockutils [req-2740701f-022a-4778-b06e-a2f9986c6274 req-8a86f6b5-7e71-4eb3-912e-453e833fbf03 service nova] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.276380] env[69227]: DEBUG nova.compute.manager [req-2740701f-022a-4778-b06e-a2f9986c6274 req-8a86f6b5-7e71-4eb3-912e-453e833fbf03 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] No waiting events found dispatching network-vif-plugged-ec99ae33-7bc2-4485-8934-c7054ce71555 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 869.276380] env[69227]: WARNING nova.compute.manager [req-2740701f-022a-4778-b06e-a2f9986c6274 req-8a86f6b5-7e71-4eb3-912e-453e833fbf03 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] 
Received unexpected event network-vif-plugged-ec99ae33-7bc2-4485-8934-c7054ce71555 for instance with vm_state building and task_state spawning. [ 869.369282] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Successfully updated port: ec99ae33-7bc2-4485-8934-c7054ce71555 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.871781] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "refresh_cache-be8dae7e-b829-455a-b8d3-73fb04c40128" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.871958] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquired lock "refresh_cache-be8dae7e-b829-455a-b8d3-73fb04c40128" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.872883] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 870.435165] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 870.628560] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Updating instance_info_cache with network_info: [{"id": "ec99ae33-7bc2-4485-8934-c7054ce71555", "address": "fa:16:3e:73:d6:10", "network": {"id": "b7fb976e-5a55-4c5f-89cd-14f77783b204", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1234943946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ad515c4e09a4c10948ddf23862039d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec99ae33-7b", "ovs_interfaceid": "ec99ae33-7bc2-4485-8934-c7054ce71555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.132035] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Releasing lock "refresh_cache-be8dae7e-b829-455a-b8d3-73fb04c40128" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.132035] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance network_info: |[{"id": "ec99ae33-7bc2-4485-8934-c7054ce71555", "address": "fa:16:3e:73:d6:10", "network": {"id": "b7fb976e-5a55-4c5f-89cd-14f77783b204", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1234943946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ad515c4e09a4c10948ddf23862039d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec99ae33-7b", "ovs_interfaceid": "ec99ae33-7bc2-4485-8934-c7054ce71555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 
871.132224] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:d6:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '535b175f-71d3-4226-81ab-ca253f27fedd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec99ae33-7bc2-4485-8934-c7054ce71555', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.139523] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Creating folder: Project (4ad515c4e09a4c10948ddf23862039d9). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 871.139785] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bec64096-3424-4085-a13d-9da174e74b72 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.151574] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Created folder: Project (4ad515c4e09a4c10948ddf23862039d9) in parent group-v694623. [ 871.151681] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Creating folder: Instances. Parent ref: group-v694666. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 871.151881] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-576821d6-2960-448e-ad31-9846e998f99f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.161326] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Created folder: Instances in parent group-v694666. [ 871.161559] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 871.161745] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 871.161984] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-810ca4d9-2772-4500-8ff5-80fe365b7078 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.180665] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.180665] env[69227]: value = "task-3475026" [ 871.180665] env[69227]: _type = "Task" [ 871.180665] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.187650] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475026, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.329763] env[69227]: DEBUG nova.compute.manager [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Received event network-changed-ec99ae33-7bc2-4485-8934-c7054ce71555 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 871.329763] env[69227]: DEBUG nova.compute.manager [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Refreshing instance network info cache due to event network-changed-ec99ae33-7bc2-4485-8934-c7054ce71555. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 871.329763] env[69227]: DEBUG oslo_concurrency.lockutils [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] Acquiring lock "refresh_cache-be8dae7e-b829-455a-b8d3-73fb04c40128" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.329763] env[69227]: DEBUG oslo_concurrency.lockutils [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] Acquired lock "refresh_cache-be8dae7e-b829-455a-b8d3-73fb04c40128" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.329763] env[69227]: DEBUG nova.network.neutron [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Refreshing network info cache for port ec99ae33-7bc2-4485-8934-c7054ce71555 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 871.689769] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475026, 'name': CreateVM_Task, 'duration_secs': 0.33318} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.690953] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 871.691057] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.692025] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.692025] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.692025] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86b5ad43-de96-4d29-ae75-27c30dd23a09 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.695803] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Waiting for the task: (returnval){ [ 871.695803] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52e94be6-9b8a-7cb6-d687-d9af05d7a09e" [ 871.695803] env[69227]: _type = "Task" [ 871.695803] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.704730] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52e94be6-9b8a-7cb6-d687-d9af05d7a09e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.059764] env[69227]: DEBUG nova.network.neutron [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Updated VIF entry in instance network info cache for port ec99ae33-7bc2-4485-8934-c7054ce71555. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 872.060126] env[69227]: DEBUG nova.network.neutron [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Updating instance_info_cache with network_info: [{"id": "ec99ae33-7bc2-4485-8934-c7054ce71555", "address": "fa:16:3e:73:d6:10", "network": {"id": "b7fb976e-5a55-4c5f-89cd-14f77783b204", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1234943946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ad515c4e09a4c10948ddf23862039d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "535b175f-71d3-4226-81ab-ca253f27fedd", "external-id": "nsx-vlan-transportzone-155", "segmentation_id": 155, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec99ae33-7b", "ovs_interfaceid": "ec99ae33-7bc2-4485-8934-c7054ce71555", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.207787] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.207787] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.207787] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.562689] env[69227]: DEBUG oslo_concurrency.lockutils [req-d741e19a-4968-4d72-9f90-0ca38f14ee7b req-a8d4ea06-eeb1-4aa7-a311-3f85bd943929 service nova] Releasing lock "refresh_cache-be8dae7e-b829-455a-b8d3-73fb04c40128" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.646751] env[69227]: WARNING oslo_vmware.rw_handles [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles 
Traceback (most recent call last): [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 896.646751] env[69227]: ERROR oslo_vmware.rw_handles [ 896.647359] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 896.649432] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 896.649720] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Copying Virtual Disk [datastore2] vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/99a37f5f-1e23-428b-9153-4853d4de21bc/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 896.650070] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a8aeaf8-b22c-4d5d-a3d3-ffd7e7c89b3d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.657584] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 896.657584] env[69227]: value = "task-3475027" [ 896.657584] env[69227]: _type = "Task" [ 896.657584] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.666078] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': task-3475027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.168071] env[69227]: DEBUG oslo_vmware.exceptions [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 897.168392] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.169059] env[69227]: ERROR nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 897.169059] env[69227]: Faults: ['InvalidArgument'] [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Traceback (most recent call last): [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] yield resources [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self.driver.spawn(context, instance, image_meta, [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self._vmops.spawn(context, instance, image_meta, injected_files, [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self._fetch_image_if_missing(context, vi) [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] image_cache(vi, tmp_image_ds_loc) [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: 
a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] vm_util.copy_virtual_disk( [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] session._wait_for_task(vmdk_copy_task) [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] return self.wait_for_task(task_ref) [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] return evt.wait() [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] result = hub.switch() [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] return self.greenlet.switch() [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self.f(*self.args, **self.kw) [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] raise exceptions.translate_fault(task_info.error) [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Faults: ['InvalidArgument'] [ 897.169059] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] [ 897.170057] env[69227]: INFO nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Terminating instance [ 897.170893] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquired lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 897.171113] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.171351] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d70d9f0-c836-4e2b-9375-554b96f43c07 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.173393] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 897.173581] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 897.174287] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a47dd33-db22-4a78-801c-778dc50ca31f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.182169] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 897.182376] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f619451f-1b24-4e2d-a5ba-365bdbd4f081 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.184621] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.184794] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 897.185698] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b745afbb-db1d-4465-8cc4-f4d8a7a4edbb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.190502] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for the task: (returnval){ [ 897.190502] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52503aa5-4dd4-606b-7b72-7ee9332bd40d" [ 897.190502] env[69227]: _type = "Task" [ 897.190502] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.198680] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52503aa5-4dd4-606b-7b72-7ee9332bd40d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.255765] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 897.256070] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 897.256298] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Deleting the datastore file [datastore2] a1095b15-f871-4dd2-9712-330d26ba4143 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.256606] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a3da743b-5f3c-4f7a-97c7-448fb2227e84 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.263840] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 897.263840] env[69227]: value = "task-3475029" [ 897.263840] env[69227]: _type = "Task" [ 897.263840] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.271567] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': task-3475029, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.701411] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 897.701689] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Creating directory with path [datastore2] vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 897.701893] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e63dc270-3ec9-4c72-8dbb-08bed2dba655 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.714059] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Created directory with path [datastore2] vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 897.714059] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Fetch image to [datastore2] vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 897.714059] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 897.714358] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bd0697-3b12-4774-8ea4-68c13ab2de43 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.722785] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36af51c3-324c-4840-9964-877410a93dfa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.731807] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3b12bd-94fc-4088-b0a4-7ad7997baf39 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.762389] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8dc130c1-2a4d-4a0e-92e8-1b962638c165 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.774221] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d35e152f-07ea-4cb3-bedb-ba941b3067a3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.775387] env[69227]: DEBUG oslo_vmware.api [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': task-3475029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063241} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.775626] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 897.775806] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 897.775976] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 897.776167] env[69227]: INFO nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Took 0.60 seconds to destroy the instance on the hypervisor. 
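The CreateVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries in this log all follow the same poll-until-done pattern traced by the wait_for_task / _poll_task lines: progress is logged while the task is running, a completion line is logged on success, and a translated fault (for example InvalidArgument: fileType) is raised on error. The Python below is a minimal, self-contained sketch of that loop under those assumptions, not the oslo.vmware implementation; get_task_info, TaskFault and the simulated state sequence are hypothetical stand-ins included only so the example runs on its own.

import itertools
import time

class TaskFault(Exception):
    """Stand-in for a translated VIM fault, e.g. 'A specified parameter was not correct: fileType'."""

# Hypothetical stand-in for the vSphere task-info call; it simulates a task that
# reports 'running' twice and then succeeds, so the sketch is runnable as-is.
_fake_states = itertools.chain(["running", "running"], itertools.repeat("success"))

def get_task_info(task_ref):
    state = next(_fake_states)
    return {"state": state, "progress": 0 if state == "running" else 100, "error": None}

def wait_for_task(task_ref, interval=0.1):
    """Poll a task until it finishes, mirroring the log's progress/completion lines."""
    while True:
        info = get_task_info(task_ref)
        if info["state"] == "running":
            print(f"Task: {task_ref} progress is {info['progress']}%.")
        elif info["state"] == "success":
            print(f"Task: {task_ref} completed successfully.")
            return info
        else:
            # Error path: the log raises the translated fault here, e.g. the
            # InvalidArgument fault seen on CopyVirtualDisk_Task above.
            raise TaskFault(info["error"])
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task("task-3475029")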
[ 897.778983] env[69227]: DEBUG nova.compute.claims [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 897.778983] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.779252] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.796015] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 897.904466] env[69227]: DEBUG oslo_vmware.rw_handles [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 897.975492] env[69227]: DEBUG oslo_vmware.rw_handles [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 897.975832] env[69227]: DEBUG oslo_vmware.rw_handles [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 898.545549] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-608561a9-3610-4087-92a0-827e94a4945c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.554566] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45897833-ab52-4951-935c-90114b824dae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.583863] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cbe950c-509b-4599-8ffa-935f128c5038 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.591464] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22261aaa-fef7-4359-bd2c-0ee50191bb3f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.604298] env[69227]: DEBUG nova.compute.provider_tree [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 899.108214] env[69227]: DEBUG nova.scheduler.client.report [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 899.612988] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.834s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.613493] env[69227]: ERROR nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 899.613493] env[69227]: Faults: ['InvalidArgument'] [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Traceback (most recent call last): [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 
899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self.driver.spawn(context, instance, image_meta, [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self._vmops.spawn(context, instance, image_meta, injected_files, [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self._fetch_image_if_missing(context, vi) [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] image_cache(vi, tmp_image_ds_loc) [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] vm_util.copy_virtual_disk( [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] session._wait_for_task(vmdk_copy_task) [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] return self.wait_for_task(task_ref) [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] return evt.wait() [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] result = hub.switch() [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] return self.greenlet.switch() [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] self.f(*self.args, **self.kw) [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] raise exceptions.translate_fault(task_info.error) [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Faults: ['InvalidArgument'] [ 899.613493] env[69227]: ERROR nova.compute.manager [instance: a1095b15-f871-4dd2-9712-330d26ba4143] [ 899.614520] env[69227]: DEBUG nova.compute.utils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 899.616056] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Build of instance a1095b15-f871-4dd2-9712-330d26ba4143 was re-scheduled: A specified parameter was not correct: fileType [ 899.616056] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 899.616420] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 899.616589] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 899.616745] env[69227]: DEBUG nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 899.616905] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 899.624452] env[69227]: WARNING oslo_vmware.rw_handles [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 899.624452] env[69227]: ERROR oslo_vmware.rw_handles [ 899.624928] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore1 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 899.625948] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 899.626190] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore1] vmware_temp/8bf29aa7-d0a4-46fd-8818-3a4ea3559876/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 899.626438] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8b749a3-deed-4393-8749-8f5f1d1b989e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.635678] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Waiting for the task: (returnval){ [ 899.635678] env[69227]: value = "task-3475030" [ 899.635678] env[69227]: _type = "Task" [ 899.635678] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.646240] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Task: {'id': task-3475030, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.145787] env[69227]: DEBUG oslo_vmware.exceptions [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 900.146035] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.146636] env[69227]: ERROR nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 900.146636] env[69227]: Faults: ['InvalidArgument'] [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Traceback (most recent call last): [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] yield resources [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self.driver.spawn(context, instance, image_meta, [ 900.146636] env[69227]: ERROR 
nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self._fetch_image_if_missing(context, vi) [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] image_cache(vi, tmp_image_ds_loc) [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] vm_util.copy_virtual_disk( [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] session._wait_for_task(vmdk_copy_task) [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] return self.wait_for_task(task_ref) [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] return evt.wait() [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] result = hub.switch() [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] return self.greenlet.switch() [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self.f(*self.args, **self.kw) [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] 
raise exceptions.translate_fault(task_info.error) [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Faults: ['InvalidArgument'] [ 900.146636] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] [ 900.147811] env[69227]: INFO nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Terminating instance [ 900.149942] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 900.150206] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 900.151010] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83aff424-4096-4a68-ae76-d926ccf9993b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.158990] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 900.159274] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec4e0d11-bf6a-44de-bfa5-727b6dd5b2f1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.217710] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 900.217942] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Deleting contents of the VM from datastore datastore1 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 900.218144] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Deleting the datastore file [datastore1] a7fa6db1-6f80-4f30-84b1-6179b0774889 {{(pid=69227) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 900.218415] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9277c030-6ac9-4859-becb-e1ca93904918 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.225317] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Waiting for the task: (returnval){ [ 900.225317] env[69227]: value = "task-3475032" [ 900.225317] env[69227]: _type = "Task" [ 900.225317] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.233388] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Task: {'id': task-3475032, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.460359] env[69227]: DEBUG nova.network.neutron [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.735973] env[69227]: DEBUG oslo_vmware.api [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Task: {'id': task-3475032, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066143} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.736166] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.736335] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Deleted contents of the VM from datastore datastore1 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 900.736504] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 900.736679] env[69227]: INFO nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Took 0.59 seconds to destroy the instance on the hypervisor. 
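The failed build and the cleanup above both follow the usual oslo.vmware invoke-then-poll flow: a task such as CopyVirtualDisk_Task or DeleteDatastoreFile_Task is started on vCenter, and wait_for_task then polls the returned Task object until it finishes or its error is re-raised, which is where the "A specified parameter was not correct: fileType" (InvalidArgument) fault surfaces in the tracebacks. The snippet below is a minimal standalone sketch of that pattern, not the Nova vmops/vm_util code itself; the vCenter host, credentials and datastore paths are placeholders, not values from this log.

# Sketch of the invoke-then-poll pattern shown in the records above.
from oslo_vmware import api as vmware_api
from oslo_vmware import exceptions as vmware_exc

# Placeholder connection details -- not taken from this log.
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'username', 'password',
    api_retry_count=10, task_poll_interval=0.5)

source = '[datastore1] vmware_temp/example/tmp-sparse.vmdk'  # placeholder paths
dest = '[datastore1] vmware_temp/example/example.vmdk'

try:
    # Start the server-side copy; vCenter returns a Task managed object.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName=source, destName=dest)
    # Poll TaskInfo until the task completes; a task error is re-raised here,
    # which is how the InvalidArgument/fileType fault appears in the traceback.
    session.wait_for_task(task)
except vmware_exc.VimFaultException as exc:
    print('copy failed: %s (faults: %s)' % (exc, exc.fault_list))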
[ 900.738759] env[69227]: DEBUG nova.compute.claims [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 900.739067] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.739277] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.963575] env[69227]: INFO nova.compute.manager [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Took 1.35 seconds to deallocate network for instance. [ 901.519847] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72259eaf-2e06-453b-9447-57b6eb9e53d8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.527276] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556069a7-235e-41df-912c-ed065e2b7b4a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.558269] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea8b8ec-b43e-44b4-bc9c-9d9a49b89cd2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.564947] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1b28721-fbd8-4715-827e-154b0b168a4a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.578704] env[69227]: DEBUG nova.compute.provider_tree [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.993680] env[69227]: INFO nova.scheduler.client.report [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Deleted allocations for instance a1095b15-f871-4dd2-9712-330d26ba4143 [ 902.082243] env[69227]: DEBUG nova.scheduler.client.report [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Inventory has not 
changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 902.500979] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3c7ba846-6156-4d7e-ba72-9fe7e714e4d0 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "a1095b15-f871-4dd2-9712-330d26ba4143" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 297.665s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.502347] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "a1095b15-f871-4dd2-9712-330d26ba4143" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 100.595s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.502583] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "a1095b15-f871-4dd2-9712-330d26ba4143-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.502793] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "a1095b15-f871-4dd2-9712-330d26ba4143-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.502959] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "a1095b15-f871-4dd2-9712-330d26ba4143-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.504947] env[69227]: INFO nova.compute.manager [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Terminating instance [ 902.506865] env[69227]: DEBUG nova.compute.manager [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 902.507186] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 902.507337] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d467e9bd-9342-43d0-86bb-010e3e65a627 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.516238] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-337a7bc8-68bb-43b0-87b5-e46301be3321 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.543875] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a1095b15-f871-4dd2-9712-330d26ba4143 could not be found. [ 902.544157] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 902.544290] env[69227]: INFO nova.compute.manager [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Took 0.04 seconds to destroy the instance on the hypervisor. [ 902.544528] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.544751] env[69227]: DEBUG nova.compute.manager [-] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 902.544848] env[69227]: DEBUG nova.network.neutron [-] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 902.586629] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.847s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.587204] env[69227]: ERROR nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 902.587204] env[69227]: Faults: ['InvalidArgument'] [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Traceback (most recent call last): [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self.driver.spawn(context, instance, image_meta, [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self._vmops.spawn(context, instance, image_meta, injected_files, [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self._fetch_image_if_missing(context, vi) [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] image_cache(vi, tmp_image_ds_loc) [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] vm_util.copy_virtual_disk( [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] 
session._wait_for_task(vmdk_copy_task) [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] return self.wait_for_task(task_ref) [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] return evt.wait() [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] result = hub.switch() [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] return self.greenlet.switch() [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] self.f(*self.args, **self.kw) [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] raise exceptions.translate_fault(task_info.error) [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Faults: ['InvalidArgument'] [ 902.587204] env[69227]: ERROR nova.compute.manager [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] [ 902.588381] env[69227]: DEBUG nova.compute.utils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 902.589421] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Build of instance a7fa6db1-6f80-4f30-84b1-6179b0774889 was re-scheduled: A specified parameter was not correct: fileType [ 902.589421] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 902.589833] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 
tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 902.590017] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 902.590196] env[69227]: DEBUG nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 902.590360] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 903.006319] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 903.059868] env[69227]: DEBUG nova.network.neutron [-] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.352831] env[69227]: DEBUG nova.network.neutron [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.526075] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.526356] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.528717] env[69227]: INFO nova.compute.claims [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.562901] env[69227]: INFO nova.compute.manager [-] [instance: a1095b15-f871-4dd2-9712-330d26ba4143] Took 1.02 seconds to deallocate network for instance. [ 903.597913] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.598161] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.856718] env[69227]: INFO nova.compute.manager [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Took 1.27 seconds to deallocate network for instance. [ 904.594099] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1d7bfda7-1dd7-4e47-8ece-1f9c1fbc02c6 tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "a1095b15-f871-4dd2-9712-330d26ba4143" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.092s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.812489] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df67a67a-d519-432f-bbfa-bd8dcb94b2fb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.820190] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ed9101-2593-4a77-93bc-5ebf73ce34b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.849338] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b243062-0c2e-4156-9a28-f7cf280e8f83 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.856280] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c32b1cd2-3044-45cc-a7dc-472cd7a82a39 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.872422] env[69227]: DEBUG nova.compute.provider_tree [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.889440] env[69227]: INFO nova.scheduler.client.report [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 
tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Deleted allocations for instance a7fa6db1-6f80-4f30-84b1-6179b0774889 [ 905.376433] env[69227]: DEBUG nova.scheduler.client.report [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 905.397549] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1609450a-75b8-4d9f-b219-0c01c51c7648 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 255.551s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.398953] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 58.197s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.399132] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "a7fa6db1-6f80-4f30-84b1-6179b0774889-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.399450] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.399608] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.401607] env[69227]: INFO nova.compute.manager [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 
tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Terminating instance [ 905.402712] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquiring lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.402917] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Acquired lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.403034] env[69227]: DEBUG nova.network.neutron [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 905.882922] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.883494] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 905.901774] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 905.969555] env[69227]: DEBUG nova.network.neutron [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.078154] env[69227]: DEBUG nova.network.neutron [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 906.387999] env[69227]: DEBUG nova.compute.utils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 906.390203] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 906.390203] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 906.426324] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.426620] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.428121] env[69227]: INFO nova.compute.claims [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 906.447588] env[69227]: DEBUG nova.policy [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ae1dad573114c2f8238da7a4bf0ecbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f28aa5cc98d46e69e144ccd9c8e2931', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 906.581419] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 
tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Releasing lock "refresh_cache-a7fa6db1-6f80-4f30-84b1-6179b0774889" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.581849] env[69227]: DEBUG nova.compute.manager [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.582126] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 906.582448] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d84bc497-417e-427d-8245-ba1dd4afb827 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.591553] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efa62b59-1545-4346-862b-9a08b45ffbd7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.619420] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a7fa6db1-6f80-4f30-84b1-6179b0774889 could not be found. [ 906.619698] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 906.619886] env[69227]: INFO nova.compute.manager [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Took 0.04 seconds to destroy the instance on the hypervisor. [ 906.620148] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 906.620369] env[69227]: DEBUG nova.compute.manager [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.620463] env[69227]: DEBUG nova.network.neutron [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 906.639020] env[69227]: DEBUG nova.network.neutron [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.892104] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Successfully created port: 50e524c1-4e9f-4613-8416-c04f46419083 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 906.894360] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 907.140284] env[69227]: DEBUG nova.network.neutron [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.643033] env[69227]: INFO nova.compute.manager [-] [instance: a7fa6db1-6f80-4f30-84b1-6179b0774889] Took 1.02 seconds to deallocate network for instance. 
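For reference, the inventory dict that the report client keeps logging for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b implies the schedulable capacity via the usual Placement rule capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation (16 VCPU, 65530 MB, 93 GB here). Below is a small worked check of those numbers, assuming that rule; the values are copied from the inventory records above.

# Capacity implied by the logged inventory, using the standard Placement rule
# capacity = (total - reserved) * allocation_ratio (applied here only as a
# sanity check of what those allocation ratios mean).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 93},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    # VCPU -> 192.0, MEMORY_MB -> 196078.0, DISK_GB -> 400.0
    print('%s: capacity %s, per-allocation cap %s' % (rc, capacity, inv['max_unit']))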
[ 907.740225] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d5063c-a696-47f1-8edf-158afc895076 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.749210] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc76632a-294b-4c88-815c-2e088d53b4b1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.778722] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ee229e-2c1b-4658-8ee9-e348d35239fe {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.786152] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be140377-0281-47a0-ae31-5e29da1e6d82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.800254] env[69227]: DEBUG nova.compute.provider_tree [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.904398] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 907.933610] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 907.933853] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 907.934016] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
907.934237] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 907.934381] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 907.934523] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 907.934728] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 907.934880] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 907.935058] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 907.935221] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 907.935389] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 907.936336] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ca3a20-e17d-45f0-899c-3141cf9c325d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.948029] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a9b0ee-7bb9-45e0-8609-d2c15ef9b44d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.303011] env[69227]: DEBUG nova.scheduler.client.report [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed for provider 
30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 908.434398] env[69227]: DEBUG nova.compute.manager [req-d8075b0f-8325-43d9-bc19-0c0f6c674313 req-cfef835d-acf9-4ade-96e1-28641ecaaf6f service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Received event network-vif-plugged-50e524c1-4e9f-4613-8416-c04f46419083 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 908.434398] env[69227]: DEBUG oslo_concurrency.lockutils [req-d8075b0f-8325-43d9-bc19-0c0f6c674313 req-cfef835d-acf9-4ade-96e1-28641ecaaf6f service nova] Acquiring lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.434398] env[69227]: DEBUG oslo_concurrency.lockutils [req-d8075b0f-8325-43d9-bc19-0c0f6c674313 req-cfef835d-acf9-4ade-96e1-28641ecaaf6f service nova] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.434398] env[69227]: DEBUG oslo_concurrency.lockutils [req-d8075b0f-8325-43d9-bc19-0c0f6c674313 req-cfef835d-acf9-4ade-96e1-28641ecaaf6f service nova] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.434398] env[69227]: DEBUG nova.compute.manager [req-d8075b0f-8325-43d9-bc19-0c0f6c674313 req-cfef835d-acf9-4ade-96e1-28641ecaaf6f service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] No waiting events found dispatching network-vif-plugged-50e524c1-4e9f-4613-8416-c04f46419083 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 908.434398] env[69227]: WARNING nova.compute.manager [req-d8075b0f-8325-43d9-bc19-0c0f6c674313 req-cfef835d-acf9-4ade-96e1-28641ecaaf6f service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Received unexpected event network-vif-plugged-50e524c1-4e9f-4613-8416-c04f46419083 for instance with vm_state building and task_state spawning. 
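The 908.303011 entry above shows the inventory the scheduler report client compared against placement for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b. As a worked illustration only (not Nova or placement code), the sketch below derives the schedulable capacity per resource class from that exact dict, assuming the usual placement convention capacity = (total - reserved) * allocation_ratio; the helper name `effective_capacity` is hypothetical.

```python
# Inventory copied verbatim from the 908.303011 entry above.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def effective_capacity(inventory):
    """Schedulable capacity per resource class.

    Assumes the common placement convention:
        capacity = (total - reserved) * allocation_ratio
    max_unit still caps a single allocation (e.g. at most 16 VCPU or
    93 DISK_GB per instance for this provider).
    """
    return {
        rc: int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        for rc, inv in inventory.items()
    }


if __name__ == '__main__':
    # Expected: {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
    print(effective_capacity(INVENTORY))
```

Against those totals, the per-instance claims recorded later by the resource tracker (1 VCPU, 192 MB RAM, 1 GB disk for each m1.nano instance) are small, which is consistent with the inventory repeatedly being reported as unchanged.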
[ 908.669386] env[69227]: DEBUG oslo_concurrency.lockutils [None req-77d09295-380b-4544-a206-b941fdc83372 tempest-FloatingIPsAssociationTestJSON-1516066495 tempest-FloatingIPsAssociationTestJSON-1516066495-project-member] Lock "a7fa6db1-6f80-4f30-84b1-6179b0774889" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.269s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.697491] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Successfully updated port: 50e524c1-4e9f-4613-8416-c04f46419083 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 908.807917] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.808434] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 909.199962] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "refresh_cache-fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.200291] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "refresh_cache-fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.200291] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 909.312735] env[69227]: DEBUG nova.compute.utils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 909.314102] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 909.314285] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 909.364302] env[69227]: DEBUG nova.policy [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ae1dad573114c2f8238da7a4bf0ecbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f28aa5cc98d46e69e144ccd9c8e2931', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 909.648499] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Successfully created port: 70db4b50-53b8-4a2e-8b56-95163df940fd {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 909.783589] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.817591] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 909.945844] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Updating instance_info_cache with network_info: [{"id": "50e524c1-4e9f-4613-8416-c04f46419083", "address": "fa:16:3e:c6:5f:50", "network": {"id": "6945d254-6d7f-4850-a093-6028409cc6fe", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1895631242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f28aa5cc98d46e69e144ccd9c8e2931", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e524c1-4e", "ovs_interfaceid": "50e524c1-4e9f-4613-8416-c04f46419083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.426660] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 910.449031] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "refresh_cache-fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.449031] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance network_info: |[{"id": "50e524c1-4e9f-4613-8416-c04f46419083", "address": "fa:16:3e:c6:5f:50", "network": {"id": "6945d254-6d7f-4850-a093-6028409cc6fe", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1895631242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f28aa5cc98d46e69e144ccd9c8e2931", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap50e524c1-4e", "ovs_interfaceid": "50e524c1-4e9f-4613-8416-c04f46419083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 910.449407] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c6:5f:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50e524c1-4e9f-4613-8416-c04f46419083', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.456975] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating folder: Project (3f28aa5cc98d46e69e144ccd9c8e2931). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 910.458236] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-38ba87a2-42ad-46d2-8f17-58b12fdaf6c5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.461344] env[69227]: DEBUG nova.compute.manager [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Received event network-changed-50e524c1-4e9f-4613-8416-c04f46419083 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 910.461527] env[69227]: DEBUG nova.compute.manager [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Refreshing instance network info cache due to event network-changed-50e524c1-4e9f-4613-8416-c04f46419083. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 910.461731] env[69227]: DEBUG oslo_concurrency.lockutils [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] Acquiring lock "refresh_cache-fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.461872] env[69227]: DEBUG oslo_concurrency.lockutils [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] Acquired lock "refresh_cache-fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.462037] env[69227]: DEBUG nova.network.neutron [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Refreshing network info cache for port 50e524c1-4e9f-4613-8416-c04f46419083 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 910.472400] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Created folder: Project (3f28aa5cc98d46e69e144ccd9c8e2931) in parent group-v694623. [ 910.472578] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating folder: Instances. Parent ref: group-v694669. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 910.472802] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d578774-1b05-43ee-bb51-7ba4e8ea6814 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.482896] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Created folder: Instances in parent group-v694669. [ 910.483139] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.483318] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 910.483506] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa53cc63-a122-48ef-866e-541b79492c84 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.502588] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.502588] env[69227]: value = "task-3475035" [ 910.502588] env[69227]: _type = "Task" [ 910.502588] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.510033] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475035, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.826405] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 910.850871] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 910.851112] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 910.851268] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.851447] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 910.851968] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.851968] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 910.851968] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 
tempest-MultipleCreateTestJSON-1621903382-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 910.852143] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 910.852288] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 910.852451] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 910.852622] env[69227]: DEBUG nova.virt.hardware [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 910.853485] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a407095-695a-47c2-83a5-9ed495edc4cd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.861676] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140da94a-ac7b-479c-897c-80434e1d04a9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.012920] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475035, 'name': CreateVM_Task, 'duration_secs': 0.283513} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.013118] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 911.013779] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.013948] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.014301] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 911.014575] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a67817d9-8b69-420d-a8fe-474a414d5d58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.019063] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 911.019063] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52cad982-aefe-7c0e-0b93-d0299e50d54e" [ 911.019063] env[69227]: _type = "Task" [ 911.019063] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.026693] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52cad982-aefe-7c0e-0b93-d0299e50d54e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.208998] env[69227]: DEBUG nova.network.neutron [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Updated VIF entry in instance network info cache for port 50e524c1-4e9f-4613-8416-c04f46419083. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 911.209350] env[69227]: DEBUG nova.network.neutron [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Updating instance_info_cache with network_info: [{"id": "50e524c1-4e9f-4613-8416-c04f46419083", "address": "fa:16:3e:c6:5f:50", "network": {"id": "6945d254-6d7f-4850-a093-6028409cc6fe", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1895631242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f28aa5cc98d46e69e144ccd9c8e2931", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50e524c1-4e", "ovs_interfaceid": "50e524c1-4e9f-4613-8416-c04f46419083", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.223526] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Successfully updated port: 70db4b50-53b8-4a2e-8b56-95163df940fd {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.530483] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.530483] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.530772] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.712596] env[69227]: DEBUG oslo_concurrency.lockutils [req-547fa9e6-01c7-4b70-887a-790ca6071b8b req-d33bb6a3-a303-4ea9-b68d-f4bbbe980d08 service nova] Releasing lock "refresh_cache-fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.725569] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "refresh_cache-3a8be11c-6a0e-4dbb-97c0-4290a2716487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.725743] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "refresh_cache-3a8be11c-6a0e-4dbb-97c0-4290a2716487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.725898] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 912.269082] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 912.416848] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Updating instance_info_cache with network_info: [{"id": "70db4b50-53b8-4a2e-8b56-95163df940fd", "address": "fa:16:3e:4e:95:7b", "network": {"id": "6945d254-6d7f-4850-a093-6028409cc6fe", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1895631242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f28aa5cc98d46e69e144ccd9c8e2931", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70db4b50-53", "ovs_interfaceid": "70db4b50-53b8-4a2e-8b56-95163df940fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.426610] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.426836] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 912.481816] env[69227]: DEBUG nova.compute.manager [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Received event network-vif-plugged-70db4b50-53b8-4a2e-8b56-95163df940fd {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 912.481884] env[69227]: DEBUG oslo_concurrency.lockutils [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] Acquiring lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.482194] env[69227]: DEBUG oslo_concurrency.lockutils [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.482382] env[69227]: DEBUG oslo_concurrency.lockutils [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.482547] env[69227]: DEBUG nova.compute.manager [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] No waiting events found dispatching network-vif-plugged-70db4b50-53b8-4a2e-8b56-95163df940fd {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 912.482712] env[69227]: WARNING nova.compute.manager [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Received unexpected event network-vif-plugged-70db4b50-53b8-4a2e-8b56-95163df940fd for instance with vm_state building and task_state spawning. [ 912.482886] env[69227]: DEBUG nova.compute.manager [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Received event network-changed-70db4b50-53b8-4a2e-8b56-95163df940fd {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 912.483053] env[69227]: DEBUG nova.compute.manager [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Refreshing instance network info cache due to event network-changed-70db4b50-53b8-4a2e-8b56-95163df940fd. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 912.483211] env[69227]: DEBUG oslo_concurrency.lockutils [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] Acquiring lock "refresh_cache-3a8be11c-6a0e-4dbb-97c0-4290a2716487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.919945] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "refresh_cache-3a8be11c-6a0e-4dbb-97c0-4290a2716487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.920330] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance network_info: |[{"id": "70db4b50-53b8-4a2e-8b56-95163df940fd", "address": "fa:16:3e:4e:95:7b", "network": {"id": "6945d254-6d7f-4850-a093-6028409cc6fe", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1895631242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f28aa5cc98d46e69e144ccd9c8e2931", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70db4b50-53", "ovs_interfaceid": "70db4b50-53b8-4a2e-8b56-95163df940fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 912.920702] env[69227]: DEBUG oslo_concurrency.lockutils [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] Acquired lock "refresh_cache-3a8be11c-6a0e-4dbb-97c0-4290a2716487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.920891] env[69227]: DEBUG nova.network.neutron [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Refreshing network info cache for port 70db4b50-53b8-4a2e-8b56-95163df940fd {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 912.922199] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:95:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '310b8ba9-edca-4135-863e-f4a786dd4a77', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'70db4b50-53b8-4a2e-8b56-95163df940fd', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.929684] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 912.933129] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.933453] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 912.933502] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 912.933654] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 912.933886] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 912.934884] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ecc52d-4fdf-4eda-a0c3-c02cf63a48b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.937713] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-479a7e52-9c12-4147-8209-367b186dede6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.961876] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94518bc-5fde-4e69-bb9d-bfe5de6f48ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.965650] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.965650] env[69227]: value = "task-3475036" [ 912.965650] env[69227]: _type = "Task" [ 912.965650] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.978394] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c1d6f6-85ce-4c58-bd7c-c5955699697d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.983677] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475036, 'name': CreateVM_Task} progress is 6%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.988836] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9df2e7e-34ea-4d48-ac08-a33ca2e9a4d2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.021819] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180952MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 913.021897] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.022114] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.205710] env[69227]: DEBUG nova.network.neutron [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Updated VIF entry in instance network info cache for port 70db4b50-53b8-4a2e-8b56-95163df940fd. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 913.205948] env[69227]: DEBUG nova.network.neutron [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Updating instance_info_cache with network_info: [{"id": "70db4b50-53b8-4a2e-8b56-95163df940fd", "address": "fa:16:3e:4e:95:7b", "network": {"id": "6945d254-6d7f-4850-a093-6028409cc6fe", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1895631242-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f28aa5cc98d46e69e144ccd9c8e2931", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "310b8ba9-edca-4135-863e-f4a786dd4a77", "external-id": "nsx-vlan-transportzone-768", "segmentation_id": 768, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70db4b50-53", "ovs_interfaceid": "70db4b50-53b8-4a2e-8b56-95163df940fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.476289] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475036, 'name': CreateVM_Task, 'duration_secs': 0.295717} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.476775] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 913.477438] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.477632] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.477949] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 913.478205] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc7179a7-75f2-42d4-bc6f-825ee0877bb9 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.482434] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 913.482434] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52076b8a-63fd-2cfc-5f3b-96b9144269ec" [ 913.482434] env[69227]: _type = "Task" [ 913.482434] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.491761] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52076b8a-63fd-2cfc-5f3b-96b9144269ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.709570] env[69227]: DEBUG oslo_concurrency.lockutils [req-138de310-3d7c-49b8-b36b-28eec8239034 req-7230837b-ff99-4cac-a5c0-328879c18cf2 service nova] Releasing lock "refresh_cache-3a8be11c-6a0e-4dbb-97c0-4290a2716487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.992174] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.992440] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.992635] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.048774] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.048949] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049090] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049230] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049318] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049429] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049547] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049698] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049818] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.049927] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 914.131769] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.131948] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.268959] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cdc9a9cd-5716-445d-a320-f1f0ee03916b tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "0da7dce2-7016-4020-8b9c-15b1e2f5f349" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.269197] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cdc9a9cd-5716-445d-a320-f1f0ee03916b tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "0da7dce2-7016-4020-8b9c-15b1e2f5f349" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.552835] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 915.056295] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 915.559827] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b79868a-be93-4c85-bac0-4167c4ea9b2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.062835] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 74713144-66f6-4513-bac5-379f4a1b1cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 916.566181] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43ec99d7-fc56-493f-b845-710027a320c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.069113] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 917.572850] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance de21ad29-1e75-44b6-b1d8-ba0e702a7fe2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 918.076382] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e77cfa8f-4678-4fa0-9cc8-750895c85013 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 918.579087] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e04f79e6-e7af-4d94-aed0-3f6aecbd8806 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 919.081812] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 940ca4b4-783b-4527-8559-d00d9e48fd05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 919.584746] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4f6ce1c9-4790-4901-8462-1b24f52ef54e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.087725] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 532fb3f7-f71e-4906-bf1a-c15f9762c04a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 920.590492] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 921.093185] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 921.595961] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.103065] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0da7dce2-7016-4020-8b9c-15b1e2f5f349 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 922.103065] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 922.103065] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 922.387238] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e0b78b1-46f9-4db6-be85-f52c7461ebb7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.395370] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e9bd069-0370-4514-a7a3-b24ed6531a5c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.425978] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a697b90-fdb0-4a66-b6de-40341287bd53 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.433418] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0a885a-9e63-4384-a836-b97fe7fc960f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.447576] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.951868] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 923.456689] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 923.456689] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.435s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.452605] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.452895] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.959593] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 925.959593] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 925.959767] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 926.465851] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466071] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466172] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466295] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466416] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466537] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466656] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466798] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.466926] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.467055] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 926.467177] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 926.467379] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.467544] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.467544] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.467825] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 926.467947] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 944.238681] env[69227]: WARNING oslo_vmware.rw_handles [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 944.238681] env[69227]: ERROR oslo_vmware.rw_handles [ 944.239549] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 944.241129] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 944.241419] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Copying Virtual Disk [datastore2] vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/66ec0b54-0242-452b-8f95-5ab326d05738/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 944.241701] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-52062456-1b29-4686-b6e7-699880a6556c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.249122] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for the 
task: (returnval){ [ 944.249122] env[69227]: value = "task-3475037" [ 944.249122] env[69227]: _type = "Task" [ 944.249122] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.257703] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Task: {'id': task-3475037, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.759763] env[69227]: DEBUG oslo_vmware.exceptions [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 944.760097] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.760658] env[69227]: ERROR nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 944.760658] env[69227]: Faults: ['InvalidArgument'] [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Traceback (most recent call last): [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] yield resources [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self.driver.spawn(context, instance, image_meta, [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self._fetch_image_if_missing(context, vi) [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 
9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] image_cache(vi, tmp_image_ds_loc) [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] vm_util.copy_virtual_disk( [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] session._wait_for_task(vmdk_copy_task) [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] return self.wait_for_task(task_ref) [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] return evt.wait() [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] result = hub.switch() [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] return self.greenlet.switch() [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self.f(*self.args, **self.kw) [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] raise exceptions.translate_fault(task_info.error) [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Faults: ['InvalidArgument'] [ 944.760658] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] [ 944.761765] env[69227]: INFO nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Terminating instance [ 944.762619] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b 
tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.762619] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 944.763206] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.763361] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquired lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 944.763530] env[69227]: DEBUG nova.network.neutron [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.764456] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75da232a-0a92-45e7-93ed-7b1dc96756c8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.772998] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 944.773196] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 944.773885] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-462d002a-9547-46e9-ace9-ae3c4072d110 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.778782] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 944.778782] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]523b2e36-8a44-f395-71f8-0b38f4165dac" [ 944.778782] env[69227]: _type = "Task" [ 944.778782] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.786430] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]523b2e36-8a44-f395-71f8-0b38f4165dac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.285446] env[69227]: DEBUG nova.network.neutron [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 945.290942] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 945.291161] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating directory with path [datastore2] vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.291333] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-193b7ca9-8540-472d-8562-0bee88c161ca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.312831] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created directory with path [datastore2] vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.313263] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Fetch image to [datastore2] vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 945.313263] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 945.314029] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c8863eee-78f3-4a65-9ab0-998cf10d99a2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.321757] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2085db88-e3bc-4caf-ab11-533db0a87700 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.330020] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-874cad89-c083-4849-a24f-e005061317cd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.360712] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf2e70f-a30c-4915-abec-8e559f63f307 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.366032] env[69227]: DEBUG nova.network.neutron [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.368840] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5168d4cd-76d0-414b-8b0f-3c73bba6b795 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.389658] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 945.442727] env[69227]: DEBUG oslo_vmware.rw_handles [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 945.503555] env[69227]: DEBUG oslo_vmware.rw_handles [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 945.503735] env[69227]: DEBUG oslo_vmware.rw_handles [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 945.870286] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Releasing lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.870724] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 945.870919] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 945.871796] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2e3006-fcc5-48a2-a238-78bd6cc4b9a8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.879876] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 945.880128] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2520eed3-5ff0-4ca4-90b7-da8dbe4143f3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.906711] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 945.906928] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 945.907140] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Deleting the datastore file [datastore2] 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.907364] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a7b30b45-cf66-47bd-bddf-7bd65c63aeeb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.913914] env[69227]: DEBUG 
oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for the task: (returnval){ [ 945.913914] env[69227]: value = "task-3475039" [ 945.913914] env[69227]: _type = "Task" [ 945.913914] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.921364] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Task: {'id': task-3475039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.423609] env[69227]: DEBUG oslo_vmware.api [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Task: {'id': task-3475039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035451} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.423907] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.424049] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 946.424229] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 946.424394] env[69227]: INFO nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Took 0.55 seconds to destroy the instance on the hypervisor. [ 946.424630] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.424838] env[69227]: DEBUG nova.compute.manager [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 946.426905] env[69227]: DEBUG nova.compute.claims [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 946.427083] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 946.427315] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.215880] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ccae5c8-bb16-4344-b2ba-9dc37bbe0bb1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.224918] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d2be25-b937-4ece-9973-5ca3ac21d203 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.254100] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb2fdfd-98eb-4309-a20c-89a1cddf2873 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.261295] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8eda632-fa52-40ea-b517-1e7dc9bd3501 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.274563] env[69227]: DEBUG nova.compute.provider_tree [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.778345] env[69227]: DEBUG nova.scheduler.client.report [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 948.283650] env[69227]: DEBUG 
oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.856s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.284283] env[69227]: ERROR nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 948.284283] env[69227]: Faults: ['InvalidArgument'] [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Traceback (most recent call last): [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self.driver.spawn(context, instance, image_meta, [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self._fetch_image_if_missing(context, vi) [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] image_cache(vi, tmp_image_ds_loc) [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] vm_util.copy_virtual_disk( [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] session._wait_for_task(vmdk_copy_task) [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] return self.wait_for_task(task_ref) [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 
9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] return evt.wait() [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] result = hub.switch() [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] return self.greenlet.switch() [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] self.f(*self.args, **self.kw) [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] raise exceptions.translate_fault(task_info.error) [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Faults: ['InvalidArgument'] [ 948.284283] env[69227]: ERROR nova.compute.manager [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] [ 948.285375] env[69227]: DEBUG nova.compute.utils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 948.288859] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Build of instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 was re-scheduled: A specified parameter was not correct: fileType [ 948.288859] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 948.289276] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 948.289505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.289650] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquired lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 948.289806] env[69227]: DEBUG nova.network.neutron [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 948.813160] env[69227]: DEBUG nova.network.neutron [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 948.887373] env[69227]: DEBUG nova.network.neutron [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.390189] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Releasing lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 949.390481] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 949.390673] env[69227]: DEBUG nova.compute.manager [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 950.457494] env[69227]: INFO nova.scheduler.client.report [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Deleted allocations for instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 [ 950.964201] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4f0f0e61-cd6f-439f-8354-32041599d0d9 tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 345.929s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.965930] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 149.206s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.966207] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.966419] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.966594] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.968673] env[69227]: INFO nova.compute.manager [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Terminating instance [ 950.970205] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquiring lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.970364] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160
tempest-ServerDiagnosticsV248Test-794020160-project-member] Acquired lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.970534] env[69227]: DEBUG nova.network.neutron [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 951.472596] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 951.494143] env[69227]: DEBUG nova.network.neutron [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 951.560688] env[69227]: DEBUG nova.network.neutron [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.998649] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 951.999040] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.000564] env[69227]: INFO nova.compute.claims [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.066263] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Releasing lock "refresh_cache-9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.066730] env[69227]: DEBUG nova.compute.manager [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Start 
destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 952.066999] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 952.067667] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a26f8a0f-8bc6-45c9-b592-44f833694e0e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.077500] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecd7d21-55c3-43c8-961a-620ccdc5d614 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.105130] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1 could not be found. [ 952.105352] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 952.105534] env[69227]: INFO nova.compute.manager [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 952.105787] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 952.106032] env[69227]: DEBUG nova.compute.manager [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 952.106134] env[69227]: DEBUG nova.network.neutron [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 952.124010] env[69227]: DEBUG nova.network.neutron [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Instance cache missing network info.
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 952.626881] env[69227]: DEBUG nova.network.neutron [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.131135] env[69227]: INFO nova.compute.manager [-] [instance: 9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1] Took 1.02 seconds to deallocate network for instance. [ 953.343963] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e29e654-c68b-4b72-8889-b481d5a49626 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.351681] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570b971b-5964-4a78-921f-0a56dd142e03 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.389214] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705c172b-3702-4470-980c-fc73a2dbad57 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.397812] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c95a54-4103-499a-8605-da2ed0d93ea6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.412703] env[69227]: DEBUG nova.compute.provider_tree [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.917117] env[69227]: DEBUG nova.scheduler.client.report [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 954.161279] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4dbe9598-0b7b-40d4-8a6e-d99af1b3af1b tempest-ServerDiagnosticsV248Test-794020160 tempest-ServerDiagnosticsV248Test-794020160-project-member] Lock "9c1492ea-f9d4-4a75-bc32-ca3f958ec3f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 3.196s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.421449] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=69227) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.421992] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 954.927563] env[69227]: DEBUG nova.compute.utils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 954.928997] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 954.929198] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 955.186835] env[69227]: DEBUG nova.policy [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65a24869274b400290ea69cbcf757cdb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6aa6d97dd6a44c0cbb0e3f9086feb430', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 955.435314] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 955.597658] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Successfully created port: 16fbe04e-ec6b-4312-b6e1-3074ed105fb3 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.448566] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 956.488204] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=<?>,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-04-17T12:15:14Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 956.488460] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 956.488623] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.488806] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 956.488989] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.489292] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 956.490356] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 956.490547] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 956.490728] env[69227]: DEBUG nova.virt.hardware [None
req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 956.490894] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 956.491085] env[69227]: DEBUG nova.virt.hardware [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 956.491988] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f2c703-ac41-49d4-a33d-478bd131f7a4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.506571] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b92c7b-ec1b-458f-bb51-554aa4873176 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.355919] env[69227]: DEBUG nova.compute.manager [req-e32d5896-cda7-4c7b-9f8f-42ee85ab4e3b req-0ada07b5-1279-46a1-b080-0ce212d35c02 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Received event network-vif-plugged-16fbe04e-ec6b-4312-b6e1-3074ed105fb3 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 957.356165] env[69227]: DEBUG oslo_concurrency.lockutils [req-e32d5896-cda7-4c7b-9f8f-42ee85ab4e3b req-0ada07b5-1279-46a1-b080-0ce212d35c02 service nova] Acquiring lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.356378] env[69227]: DEBUG oslo_concurrency.lockutils [req-e32d5896-cda7-4c7b-9f8f-42ee85ab4e3b req-0ada07b5-1279-46a1-b080-0ce212d35c02 service nova] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.356542] env[69227]: DEBUG oslo_concurrency.lockutils [req-e32d5896-cda7-4c7b-9f8f-42ee85ab4e3b req-0ada07b5-1279-46a1-b080-0ce212d35c02 service nova] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.356705] env[69227]: DEBUG nova.compute.manager [req-e32d5896-cda7-4c7b-9f8f-42ee85ab4e3b req-0ada07b5-1279-46a1-b080-0ce212d35c02 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] No waiting events found dispatching network-vif-plugged-16fbe04e-ec6b-4312-b6e1-3074ed105fb3 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 957.356873] env[69227]: WARNING nova.compute.manager [req-e32d5896-cda7-4c7b-9f8f-42ee85ab4e3b
req-0ada07b5-1279-46a1-b080-0ce212d35c02 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Received unexpected event network-vif-plugged-16fbe04e-ec6b-4312-b6e1-3074ed105fb3 for instance with vm_state building and task_state spawning. [ 957.546828] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Successfully updated port: 16fbe04e-ec6b-4312-b6e1-3074ed105fb3 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.049828] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "refresh_cache-b6ffb3bc-196c-4ac2-b506-3fc514653c5e" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.050499] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquired lock "refresh_cache-b6ffb3bc-196c-4ac2-b506-3fc514653c5e" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.050499] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.589387] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 958.790766] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "859d632d-fb95-4ac6-9219-8768191979a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.790766] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "859d632d-fb95-4ac6-9219-8768191979a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.794754] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Updating instance_info_cache with network_info: [{"id": "16fbe04e-ec6b-4312-b6e1-3074ed105fb3", "address": "fa:16:3e:83:82:bf", "network": {"id": "0212c41c-be99-445d-8c5b-38b774ded744", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1445769049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6aa6d97dd6a44c0cbb0e3f9086feb430", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16fbe04e-ec", "ovs_interfaceid": "16fbe04e-ec6b-4312-b6e1-3074ed105fb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.241213] env[69227]: DEBUG oslo_concurrency.lockutils [None req-026fd92e-b7c3-4457-b808-c587ac22e4dc tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "fda81689-7e25-4d08-b0f4-58df21bb2a19" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.241457] env[69227]: DEBUG oslo_concurrency.lockutils [None req-026fd92e-b7c3-4457-b808-c587ac22e4dc tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "fda81689-7e25-4d08-b0f4-58df21bb2a19" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.298062]
env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Releasing lock "refresh_cache-b6ffb3bc-196c-4ac2-b506-3fc514653c5e" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.298062] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance network_info: |[{"id": "16fbe04e-ec6b-4312-b6e1-3074ed105fb3", "address": "fa:16:3e:83:82:bf", "network": {"id": "0212c41c-be99-445d-8c5b-38b774ded744", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1445769049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6aa6d97dd6a44c0cbb0e3f9086feb430", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16fbe04e-ec", "ovs_interfaceid": "16fbe04e-ec6b-4312-b6e1-3074ed105fb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 959.298292] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:82:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd1da5fc2-0280-4f76-ac97-20ea4bc7bb16', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16fbe04e-ec6b-4312-b6e1-3074ed105fb3', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.305800] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Creating folder: Project (6aa6d97dd6a44c0cbb0e3f9086feb430). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 959.306397] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e7d7faf-7853-4df6-96dd-bdae8a873547 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.317557] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Created folder: Project (6aa6d97dd6a44c0cbb0e3f9086feb430) in parent group-v694623. 
[ 959.317749] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Creating folder: Instances. Parent ref: group-v694673. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 959.317970] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-024fa267-5030-468e-bb8d-e6e41d93955f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.326604] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Created folder: Instances in parent group-v694673. [ 959.327288] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.327362] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 959.328399] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d614905-71c1-4b92-9fac-3cc42cadd5db {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.345397] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.345397] env[69227]: value = "task-3475042" [ 959.345397] env[69227]: _type = "Task" [ 959.345397] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.353857] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475042, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.379767] env[69227]: DEBUG nova.compute.manager [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Received event network-changed-16fbe04e-ec6b-4312-b6e1-3074ed105fb3 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 959.379767] env[69227]: DEBUG nova.compute.manager [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Refreshing instance network info cache due to event network-changed-16fbe04e-ec6b-4312-b6e1-3074ed105fb3. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 959.379767] env[69227]: DEBUG oslo_concurrency.lockutils [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] Acquiring lock "refresh_cache-b6ffb3bc-196c-4ac2-b506-3fc514653c5e" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.379767] env[69227]: DEBUG oslo_concurrency.lockutils [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] Acquired lock "refresh_cache-b6ffb3bc-196c-4ac2-b506-3fc514653c5e" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.379767] env[69227]: DEBUG nova.network.neutron [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Refreshing network info cache for port 16fbe04e-ec6b-4312-b6e1-3074ed105fb3 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 959.856450] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475042, 'name': CreateVM_Task, 'duration_secs': 0.27288} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.856973] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 959.857809] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.858265] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.858811] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 959.859253] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b51c9801-3791-460a-95a1-2d2190ef8dda {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.864342] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Waiting for the task: (returnval){ [ 959.864342] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52a686db-4097-4063-e923-0d96dd04b5e3" [ 959.864342] env[69227]: _type = "Task" [ 959.864342] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.872665] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52a686db-4097-4063-e923-0d96dd04b5e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.156664] env[69227]: DEBUG nova.network.neutron [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Updated VIF entry in instance network info cache for port 16fbe04e-ec6b-4312-b6e1-3074ed105fb3. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 960.156664] env[69227]: DEBUG nova.network.neutron [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Updating instance_info_cache with network_info: [{"id": "16fbe04e-ec6b-4312-b6e1-3074ed105fb3", "address": "fa:16:3e:83:82:bf", "network": {"id": "0212c41c-be99-445d-8c5b-38b774ded744", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1445769049-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6aa6d97dd6a44c0cbb0e3f9086feb430", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d1da5fc2-0280-4f76-ac97-20ea4bc7bb16", "external-id": "nsx-vlan-transportzone-563", "segmentation_id": 563, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16fbe04e-ec", "ovs_interfaceid": "16fbe04e-ec6b-4312-b6e1-3074ed105fb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.376267] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.376538] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.376749] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.658541] env[69227]: DEBUG oslo_concurrency.lockutils [req-167952b2-7da6-4145-8f32-f79779e585a0 req-1471cda1-da25-4415-8f93-7c0322749e99 service nova] Releasing lock "refresh_cache-b6ffb3bc-196c-4ac2-b506-3fc514653c5e" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.831933] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "be8dae7e-b829-455a-b8d3-73fb04c40128" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.428064] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.426388] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.426619] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 972.930588] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.930588] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.931037] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.931037] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 972.932067] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a041c0b-61b0-485c-a0d7-6fe2e25fe776 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.940992] env[69227]: DEBUG
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b0cd0cd-db86-45cc-ae04-7d8620d52883 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.955342] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4035073-de1f-42f9-9040-3d98b1447482 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.961817] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c847d5-80d5-4ef5-b266-c1e6a6325a71 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.991833] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 972.991989] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.992242] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.028016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ddea4fd2-96b9-445c-939d-92c247247452 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028437] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028437] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028437] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.028437] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 974.531864] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.037955] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b79868a-be93-4c85-bac0-4167c4ea9b2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 975.541172] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 74713144-66f6-4513-bac5-379f4a1b1cd1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 976.047112] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43ec99d7-fc56-493f-b845-710027a320c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 976.550243] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 977.053313] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance de21ad29-1e75-44b6-b1d8-ba0e702a7fe2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 977.557135] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e77cfa8f-4678-4fa0-9cc8-750895c85013 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 978.061770] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e04f79e6-e7af-4d94-aed0-3f6aecbd8806 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 978.564976] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 940ca4b4-783b-4527-8559-d00d9e48fd05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.068957] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4f6ce1c9-4790-4901-8462-1b24f52ef54e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 979.571560] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 532fb3f7-f71e-4906-bf1a-c15f9762c04a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.075859] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 980.578466] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 981.082336] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 981.586052] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0da7dce2-7016-4020-8b9c-15b1e2f5f349 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 982.088971] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 982.592411] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fda81689-7e25-4d08-b0f4-58df21bb2a19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 982.592683] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 982.592832] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 982.861621] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183ff660-d3a2-488e-9272-c81e8a1c2ec8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.868896] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e0aaef-ff2f-4fe7-9080-0230c05d9fdd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.898604] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd7b9f8-825a-43c7-a128-17734b5d2577 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.905603] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c6a311e-5c12-4b71-a661-9f6195757128 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.918928] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 983.421847] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 983.926737] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 983.927049] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.935s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.924062] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.924062] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 986.924062] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 986.924062] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 987.429695] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.429813] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430374] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430374] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430374] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430374] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430567] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430597] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430706] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430820] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 987.430937] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 987.431151] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.431323] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.431502] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.431735] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 987.431915] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 991.677173] env[69227]: WARNING oslo_vmware.rw_handles [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 991.677173] env[69227]: ERROR oslo_vmware.rw_handles [ 991.677779] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 991.679682] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 991.679916] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Copying Virtual Disk [datastore2] vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/66d5e5f5-68a8-49a8-9cef-52f3fcf685b3/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 991.680198] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e18a4244-c9dc-4f42-83aa-c5b9f43df962 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.688446] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the 
task: (returnval){ [ 991.688446] env[69227]: value = "task-3475043" [ 991.688446] env[69227]: _type = "Task" [ 991.688446] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.695872] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3475043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.199057] env[69227]: DEBUG oslo_vmware.exceptions [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 992.199057] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.199608] env[69227]: ERROR nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 992.199608] env[69227]: Faults: ['InvalidArgument'] [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] Traceback (most recent call last): [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] yield resources [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self.driver.spawn(context, instance, image_meta, [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self._vmops.spawn(context, instance, image_meta, injected_files, [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self._fetch_image_if_missing(context, vi) [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: 
ddea4fd2-96b9-445c-939d-92c247247452] image_cache(vi, tmp_image_ds_loc) [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] vm_util.copy_virtual_disk( [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] session._wait_for_task(vmdk_copy_task) [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] return self.wait_for_task(task_ref) [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] return evt.wait() [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] result = hub.switch() [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] return self.greenlet.switch() [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self.f(*self.args, **self.kw) [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] raise exceptions.translate_fault(task_info.error) [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] Faults: ['InvalidArgument'] [ 992.199608] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] [ 992.200647] env[69227]: INFO nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Terminating instance [ 992.201419] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 
tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.201650] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.201882] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-456a7fb8-83a3-4d6d-97e4-5b3ea29598a6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.203981] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 992.204196] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 992.204901] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854ed958-a8db-449f-b3e4-d65a89ea0de7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.212265] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 992.213359] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97c59c38-47ec-4461-b6bb-04b062f737ba {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.214660] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.214822] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 992.215484] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75fecda7-92a6-4402-84bf-29fec039f29e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.220985] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 992.220985] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52dbb986-3126-9091-731c-6d0ba65e4f3c" [ 992.220985] env[69227]: _type = "Task" [ 992.220985] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.227222] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52dbb986-3126-9091-731c-6d0ba65e4f3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.289686] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 992.290084] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 992.290398] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleting the datastore file [datastore2] ddea4fd2-96b9-445c-939d-92c247247452 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 992.290688] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c62f7982-9bc9-4332-a1f1-4510742e079f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.296523] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 992.296523] env[69227]: value = "task-3475045" [ 992.296523] env[69227]: _type = "Task" [ 992.296523] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.304614] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3475045, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.730189] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 992.730501] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating directory with path [datastore2] vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.730710] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c0da985-5456-4fad-b71c-af8a60e6a4f4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.743095] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Created directory with path [datastore2] vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.743319] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Fetch image to [datastore2] vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 992.743504] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 992.744231] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb49610e-6c44-437d-bcce-02b9ed04ba9c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.750578] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-817ea2d2-6820-4a69-b4ec-4f1d21fba32d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.759205] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23f3460-4071-4fa7-8b28-d8aa1388c733 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.789334] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e4f70d5-1921-40ae-9d86-e16d0c6aec6b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.794578] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-249a91ba-6f19-4ac2-beb3-0ec78f648d43 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.804358] env[69227]: DEBUG oslo_vmware.api [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3475045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.081763} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.804554] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 992.804726] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 992.804918] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 992.805113] env[69227]: INFO nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Took 0.60 seconds to destroy the instance on the hypervisor. 
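The entries above follow the standard oslo.vmware task pattern: nova invokes a vCenter task through the API session (CopyVirtualDisk_Task earlier, DeleteDatastoreFile_Task here), then wait_for_task/_poll_task poll it ("progress is 0%") until it completes or raises a translated fault such as the VimFaultException with Faults: ['InvalidArgument'] seen in this trace. A minimal sketch of that pattern, assuming an already-created oslo_vmware.api.VMwareAPISession; copy_sparse_image, dc_ref, src_path and dst_path are illustrative names, not values taken from this log:

import logging

from oslo_vmware import exceptions as vexc

LOG = logging.getLogger(__name__)


def copy_sparse_image(session, dc_ref, src_path, dst_path):
    """Start a server-side VMDK copy and block until the task completes.

    `session` is an oslo_vmware.api.VMwareAPISession, `dc_ref` a Datacenter
    managed-object reference, and the paths are plain
    "[datastore2] dir/file.vmdk" strings.
    """
    vdm = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', vdm,
        sourceName=src_path, sourceDatacenter=dc_ref,
        destName=dst_path, destDatacenter=dc_ref)
    try:
        # wait_for_task() polls the task and raises if it finishes in an
        # error state instead of returning the task info.
        return session.wait_for_task(task)
    except vexc.VimFaultException as exc:
        # The failure mode logged above: the copy errors out with faults
        # such as ['InvalidArgument'] ("A specified parameter was not
        # correct: fileType"); nova reacts by destroying the half-built
        # instance and rescheduling the build.
        LOG.warning('CopyVirtualDisk_Task failed with faults %s',
                    exc.fault_list)
        raise

In the trace, that exception is what triggers the Terminating/UnregisterVM/DeleteDatastoreFile_Task sequence just logged, after which the build of ddea4fd2-96b9-445c-939d-92c247247452 is re-scheduled a few entries later.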
[ 992.807170] env[69227]: DEBUG nova.compute.claims [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 992.807352] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.807596] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.815956] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 992.868021] env[69227]: DEBUG oslo_vmware.rw_handles [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 992.927514] env[69227]: DEBUG oslo_vmware.rw_handles [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 992.927698] env[69227]: DEBUG oslo_vmware.rw_handles [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 993.611550] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281d3fe3-bdf1-47de-9b29-15e9ee5d0584 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.619208] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80d48b3-63b4-499b-9f24-eafb273fdf9a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.649453] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6753ed37-1e1e-41e4-8a1d-d5bb5cf94608 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.656847] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4edfd38e-6fd3-4e90-828a-9bb5e1dc5a00 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.669610] env[69227]: DEBUG nova.compute.provider_tree [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 994.173077] env[69227]: DEBUG nova.scheduler.client.report [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 994.678193] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.870s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.678812] env[69227]: ERROR nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 994.678812] env[69227]: Faults: ['InvalidArgument'] [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] Traceback (most recent call last): [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 994.678812] env[69227]: 
ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self.driver.spawn(context, instance, image_meta, [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self._vmops.spawn(context, instance, image_meta, injected_files, [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self._fetch_image_if_missing(context, vi) [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] image_cache(vi, tmp_image_ds_loc) [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] vm_util.copy_virtual_disk( [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] session._wait_for_task(vmdk_copy_task) [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] return self.wait_for_task(task_ref) [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] return evt.wait() [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] result = hub.switch() [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] return self.greenlet.switch() [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] self.f(*self.args, **self.kw) [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] raise exceptions.translate_fault(task_info.error) [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] Faults: ['InvalidArgument'] [ 994.678812] env[69227]: ERROR nova.compute.manager [instance: ddea4fd2-96b9-445c-939d-92c247247452] [ 994.679631] env[69227]: DEBUG nova.compute.utils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 994.681235] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Build of instance ddea4fd2-96b9-445c-939d-92c247247452 was re-scheduled: A specified parameter was not correct: fileType [ 994.681235] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 994.681606] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 994.681779] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 994.681948] env[69227]: DEBUG nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 994.682122] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 995.399846] env[69227]: DEBUG nova.network.neutron [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.902722] env[69227]: INFO nova.compute.manager [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: ddea4fd2-96b9-445c-939d-92c247247452] Took 1.22 seconds to deallocate network for instance. [ 996.938920] env[69227]: INFO nova.scheduler.client.report [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleted allocations for instance ddea4fd2-96b9-445c-939d-92c247247452 [ 997.448062] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2ece4020-6d4e-4309-b217-d3ffcb40027b tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "ddea4fd2-96b9-445c-939d-92c247247452" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 392.414s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.950176] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 998.475027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.475027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.476707] env[69227]: INFO nova.compute.claims [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 999.776535] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0c79f8-df24-4e39-b8fb-7d783a1e452b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.785042] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea977511-bd94-43d3-9b5d-c06d3012ee13 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.818018] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c9bb38-7531-4023-bfde-452a6a4002e2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.822498] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776b420d-2441-4af5-8f6d-0d8b7f5d037b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.835910] env[69227]: DEBUG nova.compute.provider_tree [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1000.339072] env[69227]: DEBUG nova.scheduler.client.report [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1000.844149] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.844688] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1001.349799] env[69227]: DEBUG nova.compute.utils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1001.351266] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1001.351433] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1001.388820] env[69227]: DEBUG nova.policy [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f18490b43e2a4f89b121b6a682b7e850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3eeddfd8cf394d49bd88536877399fff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1001.700403] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Successfully created port: 41ac73cb-7659-444f-b98f-7fdb4a4a9f9c {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1001.854548] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1002.865502] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1002.900270] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1002.900270] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1002.900270] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.900544] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1002.900544] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.900689] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1002.902838] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1002.902838] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1002.902838] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1002.903065] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1002.903563] env[69227]: DEBUG nova.virt.hardware [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1002.904762] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a6c9be-ad55-4ad3-9632-b4dac29e4e66 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.915487] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4559b9f2-af68-4249-b10d-6bd8e3d8c979 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.160896] env[69227]: DEBUG nova.compute.manager [req-55d0bbc7-bc75-4647-ba44-5426905eb962 req-1275d602-37a6-4d96-8ce1-12a47245f84c service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Received event network-vif-plugged-41ac73cb-7659-444f-b98f-7fdb4a4a9f9c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1003.161141] env[69227]: DEBUG oslo_concurrency.lockutils [req-55d0bbc7-bc75-4647-ba44-5426905eb962 req-1275d602-37a6-4d96-8ce1-12a47245f84c service nova] Acquiring lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.161349] env[69227]: DEBUG oslo_concurrency.lockutils [req-55d0bbc7-bc75-4647-ba44-5426905eb962 req-1275d602-37a6-4d96-8ce1-12a47245f84c service nova] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.161514] env[69227]: DEBUG oslo_concurrency.lockutils [req-55d0bbc7-bc75-4647-ba44-5426905eb962 req-1275d602-37a6-4d96-8ce1-12a47245f84c service nova] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.161678] env[69227]: 
DEBUG nova.compute.manager [req-55d0bbc7-bc75-4647-ba44-5426905eb962 req-1275d602-37a6-4d96-8ce1-12a47245f84c service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] No waiting events found dispatching network-vif-plugged-41ac73cb-7659-444f-b98f-7fdb4a4a9f9c {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1003.161837] env[69227]: WARNING nova.compute.manager [req-55d0bbc7-bc75-4647-ba44-5426905eb962 req-1275d602-37a6-4d96-8ce1-12a47245f84c service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Received unexpected event network-vif-plugged-41ac73cb-7659-444f-b98f-7fdb4a4a9f9c for instance with vm_state building and task_state spawning. [ 1003.303114] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Successfully updated port: 41ac73cb-7659-444f-b98f-7fdb4a4a9f9c {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1003.488185] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.488185] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.804725] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "refresh_cache-9c61d411-b6dd-43c9-a59a-8ff3030e6149" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.806771] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired lock "refresh_cache-9c61d411-b6dd-43c9-a59a-8ff3030e6149" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1003.806771] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1004.415921] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1004.760234] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Updating instance_info_cache with network_info: [{"id": "41ac73cb-7659-444f-b98f-7fdb4a4a9f9c", "address": "fa:16:3e:86:42:43", "network": {"id": "9d87725a-9e64-45b4-9a2c-d46e5508375b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-993429914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3eeddfd8cf394d49bd88536877399fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ac73cb-76", "ovs_interfaceid": "41ac73cb-7659-444f-b98f-7fdb4a4a9f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.195854] env[69227]: DEBUG nova.compute.manager [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Received event network-changed-41ac73cb-7659-444f-b98f-7fdb4a4a9f9c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1005.195854] env[69227]: DEBUG nova.compute.manager [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Refreshing instance network info cache due to event network-changed-41ac73cb-7659-444f-b98f-7fdb4a4a9f9c. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1005.195854] env[69227]: DEBUG oslo_concurrency.lockutils [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] Acquiring lock "refresh_cache-9c61d411-b6dd-43c9-a59a-8ff3030e6149" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.270019] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Releasing lock "refresh_cache-9c61d411-b6dd-43c9-a59a-8ff3030e6149" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.270019] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance network_info: |[{"id": "41ac73cb-7659-444f-b98f-7fdb4a4a9f9c", "address": "fa:16:3e:86:42:43", "network": {"id": "9d87725a-9e64-45b4-9a2c-d46e5508375b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-993429914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3eeddfd8cf394d49bd88536877399fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ac73cb-76", "ovs_interfaceid": "41ac73cb-7659-444f-b98f-7fdb4a4a9f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1005.270019] env[69227]: DEBUG oslo_concurrency.lockutils [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] Acquired lock "refresh_cache-9c61d411-b6dd-43c9-a59a-8ff3030e6149" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.270019] env[69227]: DEBUG nova.network.neutron [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Refreshing network info cache for port 41ac73cb-7659-444f-b98f-7fdb4a4a9f9c {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1005.270019] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:42:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'41ac73cb-7659-444f-b98f-7fdb4a4a9f9c', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1005.280815] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating folder: Project (3eeddfd8cf394d49bd88536877399fff). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1005.282145] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17c18664-0749-4caa-9d14-dd4eb6227d82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.296240] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Created folder: Project (3eeddfd8cf394d49bd88536877399fff) in parent group-v694623. [ 1005.296240] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating folder: Instances. Parent ref: group-v694676. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1005.296240] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00a087f7-629f-4217-a594-061245e4403e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.306516] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Created folder: Instances in parent group-v694676. [ 1005.306887] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1005.307750] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1005.308073] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f89a93eb-350f-46da-ac5c-716c4b3693bb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.335265] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1005.335265] env[69227]: value = "task-3475048" [ 1005.335265] env[69227]: _type = "Task" [ 1005.335265] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.342467] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475048, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.844830] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475048, 'name': CreateVM_Task, 'duration_secs': 0.32216} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.845189] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1005.845783] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.845989] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.846354] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1005.846638] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69f0ff6f-7343-4d88-a562-1712d8b0bb76 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.851213] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1005.851213] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524df491-e3e5-7b03-d4b1-c73f6d2d81c9" [ 1005.851213] env[69227]: _type = "Task" [ 1005.851213] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.858846] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524df491-e3e5-7b03-d4b1-c73f6d2d81c9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.362062] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.362411] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1006.362665] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.468396] env[69227]: DEBUG nova.network.neutron [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Updated VIF entry in instance network info cache for port 41ac73cb-7659-444f-b98f-7fdb4a4a9f9c. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1006.468396] env[69227]: DEBUG nova.network.neutron [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Updating instance_info_cache with network_info: [{"id": "41ac73cb-7659-444f-b98f-7fdb4a4a9f9c", "address": "fa:16:3e:86:42:43", "network": {"id": "9d87725a-9e64-45b4-9a2c-d46e5508375b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-993429914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3eeddfd8cf394d49bd88536877399fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41ac73cb-76", "ovs_interfaceid": "41ac73cb-7659-444f-b98f-7fdb4a4a9f9c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1006.974960] env[69227]: DEBUG oslo_concurrency.lockutils [req-4a49048e-e64c-4325-a62a-6ab3bcf66444 req-ba286121-78bd-4850-807d-139e2af98251 service nova] Releasing lock "refresh_cache-9c61d411-b6dd-43c9-a59a-8ff3030e6149" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.802809] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d72da386-f693-42ae-b05d-8e98f8724974 tempest-ServerRescueTestJSONUnderV235-1135676851 tempest-ServerRescueTestJSONUnderV235-1135676851-project-member] Acquiring lock "f9e2c26e-20e3-4333-8437-53dd42d1a1e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.803078] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d72da386-f693-42ae-b05d-8e98f8724974 tempest-ServerRescueTestJSONUnderV235-1135676851 tempest-ServerRescueTestJSONUnderV235-1135676851-project-member] Lock "f9e2c26e-20e3-4333-8437-53dd42d1a1e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.108563] env[69227]: DEBUG oslo_concurrency.lockutils [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.434852] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.571532] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d5f4ba83-1cf5-4ea3-b368-520fae6836b8 tempest-ImagesNegativeTestJSON-434565976 tempest-ImagesNegativeTestJSON-434565976-project-member] Acquiring lock "3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.572805] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d5f4ba83-1cf5-4ea3-b368-520fae6836b8 tempest-ImagesNegativeTestJSON-434565976 tempest-ImagesNegativeTestJSON-434565976-project-member] Lock "3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.007251] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6f9b95ab-2c0b-4a62-a7aa-f37218642acb tempest-ServersTestManualDisk-770401789 tempest-ServersTestManualDisk-770401789-project-member] Acquiring lock "e5db927a-625f-49c5-8f82-041550fcac67" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.007651] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6f9b95ab-2c0b-4a62-a7aa-f37218642acb tempest-ServersTestManualDisk-770401789 
tempest-ServersTestManualDisk-770401789-project-member] Lock "e5db927a-625f-49c5-8f82-041550fcac67" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.658642] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4ff2042e-5db6-4fa6-b980-d2cf2dbbea66 tempest-ServerMetadataNegativeTestJSON-1039436067 tempest-ServerMetadataNegativeTestJSON-1039436067-project-member] Acquiring lock "74c8d4ad-fa82-488d-aad6-b952061ef2c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.659723] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4ff2042e-5db6-4fa6-b980-d2cf2dbbea66 tempest-ServerMetadataNegativeTestJSON-1039436067 tempest-ServerMetadataNegativeTestJSON-1039436067-project-member] Lock "74c8d4ad-fa82-488d-aad6-b952061ef2c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.049543] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.094337] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.028619] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aab74667-45be-432b-a455-efd965708b91 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Acquiring lock "2b465fda-e6e8-473a-b17e-e5de876c171d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.028890] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aab74667-45be-432b-a455-efd965708b91 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Lock "2b465fda-e6e8-473a-b17e-e5de876c171d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.427508] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1033.427140] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.344307] env[69227]: DEBUG oslo_concurrency.lockutils [None req-448222d2-301e-422a-b0e4-18a95c29d597 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Acquiring lock "094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.344530] env[69227]: DEBUG oslo_concurrency.lockutils [None req-448222d2-301e-422a-b0e4-18a95c29d597 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Lock "094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.426743] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1034.929769] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.930066] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.930222] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.930370] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1034.931353] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f75f6d2-282b-43d3-b2e4-e4743e46dffd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.941497] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469153e3-8689-4b9b-af4d-541b000978ba {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.956504] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3fb91fdc-e618-428b-a3b0-a8abeadd81a1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.962773] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21317861-9bdc-4e03-a1c5-ed30ed461604 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.992349] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1034.992510] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.992727] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.026818] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4005bdf5-3826-4214-9fa6-f794c4f043df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027108] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027143] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027245] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027358] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027470] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027582] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027692] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027804] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.027915] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1036.531266] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43ec99d7-fc56-493f-b845-710027a320c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.036222] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1037.540122] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance de21ad29-1e75-44b6-b1d8-ba0e702a7fe2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1038.042466] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e77cfa8f-4678-4fa0-9cc8-750895c85013 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1038.546967] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e04f79e6-e7af-4d94-aed0-3f6aecbd8806 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1038.808910] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4b9d06b-d6a8-43b5-be52-a161f3dcefb0 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "50908bce-98db-4f89-b4e2-81e059044088" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.809033] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4b9d06b-d6a8-43b5-be52-a161f3dcefb0 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "50908bce-98db-4f89-b4e2-81e059044088" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.051824] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 940ca4b4-783b-4527-8559-d00d9e48fd05 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1039.238516] env[69227]: WARNING oslo_vmware.rw_handles [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1039.238516] env[69227]: ERROR oslo_vmware.rw_handles [ 1039.239057] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1039.241516] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1039.241778] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Copying Virtual Disk [datastore2] vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/5c39f1eb-024a-4995-b871-7f25ef9d22ac/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1039.242158] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9ba4cdb-26fe-460c-8165-ae959a6ce52d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.251124] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 
tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 1039.251124] env[69227]: value = "task-3475049" [ 1039.251124] env[69227]: _type = "Task" [ 1039.251124] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.259229] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3475049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.555389] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 4f6ce1c9-4790-4901-8462-1b24f52ef54e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1039.762945] env[69227]: DEBUG oslo_vmware.exceptions [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1039.763150] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.763713] env[69227]: ERROR nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.763713] env[69227]: Faults: ['InvalidArgument'] [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Traceback (most recent call last): [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] yield resources [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self.driver.spawn(context, instance, image_meta, [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self._vmops.spawn(context, instance, image_meta, 
injected_files, [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self._fetch_image_if_missing(context, vi) [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] image_cache(vi, tmp_image_ds_loc) [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] vm_util.copy_virtual_disk( [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] session._wait_for_task(vmdk_copy_task) [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] return self.wait_for_task(task_ref) [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] return evt.wait() [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] result = hub.switch() [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] return self.greenlet.switch() [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self.f(*self.args, **self.kw) [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] raise exceptions.translate_fault(task_info.error) [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1039.763713] env[69227]: 
ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Faults: ['InvalidArgument'] [ 1039.763713] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] [ 1039.765731] env[69227]: INFO nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Terminating instance [ 1039.765731] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.765731] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1039.765968] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f04c26ae-23ed-415a-b57e-e7ff56958357 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.769095] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1039.769380] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1039.770097] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5fb069-7303-4bbb-a0e3-131bc4b33cdb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.777974] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1039.778172] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1039.779295] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54692d4a-c1c6-43e1-8589-b4f238618d14 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.784019] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1039.784167] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef86a0df-035b-44a4-958c-78df475559ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.788069] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 1039.788069] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5254fe55-c9ae-f33f-f171-1cbb9725173c" [ 1039.788069] env[69227]: _type = "Task" [ 1039.788069] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.796889] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5254fe55-c9ae-f33f-f171-1cbb9725173c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.857166] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1039.857395] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1039.857575] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleting the datastore file [datastore2] 4005bdf5-3826-4214-9fa6-f794c4f043df {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1039.857863] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b8e9d84-af08-44a9-a5f1-b71bc8dc784b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.864389] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for the task: (returnval){ [ 1039.864389] env[69227]: value = "task-3475051" [ 1039.864389] env[69227]: _type = "Task" [ 1039.864389] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.872584] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3475051, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.061215] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 532fb3f7-f71e-4906-bf1a-c15f9762c04a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1040.298628] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1040.298883] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating directory with path [datastore2] vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1040.299134] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aef3a7fd-5801-43ab-9d51-f260a8f03bec {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.310629] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Created directory with path [datastore2] vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1040.311020] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Fetch image to [datastore2] vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1040.311020] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1040.312131] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fe3b7d-8ecd-48bc-a54e-f4b388e86949 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.325093] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ef154a-ecb8-46e4-af10-9728bb4bb83a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.334775] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59609fca-96b9-48b3-83aa-9570ff8be45b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.371617] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dcdcd4-0fc2-436e-95c3-0da36189eabd {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.380777] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-5c51e2d2-129e-4b12-80d8-20cccde7b1b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.382647] env[69227]: DEBUG oslo_vmware.api [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Task: {'id': task-3475051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066158} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.382873] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1040.383062] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1040.383226] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1040.383387] env[69227]: INFO nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Took 0.61 seconds to destroy the instance on the hypervisor. 
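The CopyVirtualDisk_Task failure recorded above surfaces through oslo.vmware's task polling chain (wait_for_task -> _poll_task -> translate_fault), which is what turns the vCenter-side fault into the "A specified parameter was not correct: fileType" exception in the traceback. Below is a minimal, stdlib-only sketch of that poll-and-translate pattern; the class and function names (TaskInfo, wait_for_task, the VimFaultException stand-in) are illustrative simplifications and do not reproduce the real oslo.vmware implementation.

# Illustrative sketch only: a stdlib approximation of the poll/translate pattern
# visible in the traceback above. Not the real oslo.vmware code.
import time
from dataclasses import dataclass, field


class VimFaultException(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""
    def __init__(self, msg, faults):
        super().__init__(msg)
        self.faults = faults


@dataclass
class TaskInfo:
    state: str                      # 'running' | 'success' | 'error'
    progress: int = 0
    error_msg: str = ""
    faults: list = field(default_factory=list)


def wait_for_task(poll, interval=0.5):
    """Poll the task until it finishes, mirroring the 'progress is 0%' lines
    and the final fault translation seen in the log."""
    while True:
        info = poll()
        if info.state == "running":
            print(f"Task progress is {info.progress}%.")
            time.sleep(interval)
            continue
        if info.state == "success":
            return info
        # state == 'error': translate the server-side fault into an exception,
        # which is what produces the InvalidArgument/fileType error above.
        raise VimFaultException(info.error_msg, info.faults)


if __name__ == "__main__":
    states = iter([
        TaskInfo("running", progress=0),
        TaskInfo("error",
                 error_msg="A specified parameter was not correct: fileType",
                 faults=["InvalidArgument"]),
    ])
    try:
        wait_for_task(lambda: next(states), interval=0.01)
    except VimFaultException as exc:
        print(f"Instance failed to spawn: {exc} Faults: {exc.faults}")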
[ 1040.385455] env[69227]: DEBUG nova.compute.claims [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1040.385642] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.403703] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1040.459996] env[69227]: DEBUG oslo_vmware.rw_handles [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1040.522164] env[69227]: DEBUG oslo_vmware.rw_handles [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1040.522341] env[69227]: DEBUG oslo_vmware.rw_handles [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1040.564104] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.067636] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1041.240945] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2e16a568-a258-40f6-927d-c9e6dcdd4fd2 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.241242] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2e16a568-a258-40f6-927d-c9e6dcdd4fd2 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.572028] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1042.074510] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0da7dce2-7016-4020-8b9c-15b1e2f5f349 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1042.577731] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1043.080770] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fda81689-7e25-4d08-b0f4-58df21bb2a19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1043.584341] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1044.087811] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f9e2c26e-20e3-4333-8437-53dd42d1a1e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1044.590862] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1044.659503] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8100984d-9fd7-4cf8-ac39-6c45a3e9cb2c tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "db33565c-80fa-419c-8f46-bb38e6b7e7ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.659764] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8100984d-9fd7-4cf8-ac39-6c45a3e9cb2c tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "db33565c-80fa-419c-8f46-bb38e6b7e7ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.094230] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e5db927a-625f-49c5-8f82-041550fcac67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1045.598013] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 74c8d4ad-fa82-488d-aad6-b952061ef2c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.101286] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2b465fda-e6e8-473a-b17e-e5de876c171d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.604877] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1046.605250] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1046.605431] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1046.904337] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67802d09-d8d7-4961-89d6-0b03cb3cfd85 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.912028] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6165b4-84ef-4ba1-865a-fd0d66edae13 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.941225] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e562fa-3112-4ff4-9d92-d1dccf5fe583 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.948449] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d03514-fcd4-4239-ad57-5a1431c592e3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.961176] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.463914] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1047.969150] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1047.969375] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.977s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.969649] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.584s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.783774] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a856f05-bdeb-4748-8e92-6d2a8d9f4090 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.791299] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c27941-1e81-4f96-a943-ff1d5807b16e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.821210] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a641969-a2d3-4529-bfa7-87c6b156b8c0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.827938] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8313edec-66ae-4e24-95ac-8a37f6a679d6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.840661] env[69227]: DEBUG nova.compute.provider_tree [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1049.343961] env[69227]: DEBUG nova.scheduler.client.report [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1049.848668] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.879s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} 
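The lockutils lines above report how long each caller waited for and then held the "compute_resources" lock (12.977s held by the resource tracker update, 7.584s waited by the claim abort). The following is a stdlib-only sketch of that "waited N s / held N s" accounting; the real oslo_concurrency.lockutils uses named external/internal locks and different plumbing, so this only shows the timing shape, not the actual library.

# Illustrative sketch only: per-name lock with wait/held timing output in the
# style of the oslo_concurrency.lockutils log lines above.
import threading
import time
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()


def _get_lock(name):
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, caller):
    lock = _get_lock(name)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


if __name__ == "__main__":
    caller = "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim"
    with timed_lock("compute_resources", caller):
        time.sleep(0.05)   # e.g. report inventory, then drop the claim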
[ 1049.849120] env[69227]: ERROR nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1049.849120] env[69227]: Faults: ['InvalidArgument'] [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Traceback (most recent call last): [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self.driver.spawn(context, instance, image_meta, [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self._fetch_image_if_missing(context, vi) [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] image_cache(vi, tmp_image_ds_loc) [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] vm_util.copy_virtual_disk( [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] session._wait_for_task(vmdk_copy_task) [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] return self.wait_for_task(task_ref) [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] return evt.wait() [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] result = hub.switch() [ 1049.849120] 
env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] return self.greenlet.switch() [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] self.f(*self.args, **self.kw) [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] raise exceptions.translate_fault(task_info.error) [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Faults: ['InvalidArgument'] [ 1049.849120] env[69227]: ERROR nova.compute.manager [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] [ 1049.849905] env[69227]: DEBUG nova.compute.utils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1049.851677] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Build of instance 4005bdf5-3826-4214-9fa6-f794c4f043df was re-scheduled: A specified parameter was not correct: fileType [ 1049.851677] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1049.852060] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1049.852253] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1049.852435] env[69227]: DEBUG nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1049.852611] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1049.968264] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1049.968478] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.474983] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.474983] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1050.475128] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1050.632558] env[69227]: DEBUG nova.network.neutron [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1050.983036] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.983036] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.983036] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.983036] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.983036] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1051.135261] env[69227]: INFO nova.compute.manager [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Took 1.28 seconds to deallocate network for instance. 
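After the cleanup above, the build is handed back to the scheduler ("Build of instance ... was re-scheduled"), the allocations are deleted, and the later terminate request finds the instance already gone on the backend. The sketch below shows that reschedule-on-build-failure control flow in simplified form; the enum values and function names are stand-ins for the nova.compute.manager internals referenced in the log, not the real API.

# Illustrative sketch only: build fails -> cleanup -> reschedule decision,
# mirroring the _do_build_and_run_instance flow walked through in this log.
from enum import Enum, auto


class BuildResult(Enum):
    ACTIVE = auto()        # instance booted
    FAILED = auto()        # unrecoverable, instance goes to ERROR
    RESCHEDULED = auto()   # request handed back to the scheduler


def locked_do_build_and_run_instance(spawn, cleanup, reschedulable=True):
    try:
        spawn()
        return BuildResult.ACTIVE
    except Exception as exc:
        # Mirror the log: destroy the half-built VM, drop the resource claim,
        # deallocate networking, then decide whether another host may retry.
        cleanup(exc)
        return BuildResult.RESCHEDULED if reschedulable else BuildResult.FAILED


if __name__ == "__main__":
    def failing_spawn():
        raise RuntimeError("A specified parameter was not correct: fileType")

    def cleanup(exc):
        print(f"Build of instance was re-scheduled: {exc}")
        print("Deallocating network for instance")
        print("Deleted allocations for instance")

    print(locked_do_build_and_run_instance(failing_spawn, cleanup))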
[ 1052.167117] env[69227]: INFO nova.scheduler.client.report [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Deleted allocations for instance 4005bdf5-3826-4214-9fa6-f794c4f043df [ 1052.678451] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33630570-49bd-4695-a2fe-72e34a14bb29 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 445.286s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.680115] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 246.535s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.680385] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Acquiring lock "4005bdf5-3826-4214-9fa6-f794c4f043df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.680598] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.680773] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.682662] env[69227]: INFO nova.compute.manager [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Terminating instance [ 1052.684308] env[69227]: DEBUG nova.compute.manager [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1052.684502] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1052.684838] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51d9779f-010c-4111-8e82-ba423e9500b1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.693532] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37c3e8d3-0389-4455-8319-a32b47642114 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.722636] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4005bdf5-3826-4214-9fa6-f794c4f043df could not be found. [ 1052.722838] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1052.723043] env[69227]: INFO nova.compute.manager [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1052.723301] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1052.723511] env[69227]: DEBUG nova.compute.manager [-] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1052.723604] env[69227]: DEBUG nova.network.neutron [-] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1053.184024] env[69227]: DEBUG nova.compute.manager [None req-d0a31355-d650-4585-8583-e47dce9b1c9c tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] [instance: 0b79868a-be93-4c85-bac0-4167c4ea9b2b] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1053.239142] env[69227]: DEBUG nova.network.neutron [-] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.687985] env[69227]: DEBUG nova.compute.manager [None req-d0a31355-d650-4585-8583-e47dce9b1c9c tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] [instance: 0b79868a-be93-4c85-bac0-4167c4ea9b2b] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1053.745074] env[69227]: INFO nova.compute.manager [-] [instance: 4005bdf5-3826-4214-9fa6-f794c4f043df] Took 1.02 seconds to deallocate network for instance. [ 1054.209593] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d0a31355-d650-4585-8583-e47dce9b1c9c tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Lock "0b79868a-be93-4c85-bac0-4167c4ea9b2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.130s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.714594] env[69227]: DEBUG nova.compute.manager [None req-8639c8ce-4b8f-4dd6-b267-2fa392fc0f70 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] [instance: 74713144-66f6-4513-bac5-379f4a1b1cd1] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1054.770364] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b27cb4-6f26-422d-9b8d-024305e4fe07 tempest-ListServerFiltersTestJSON-809999188 tempest-ListServerFiltersTestJSON-809999188-project-member] Lock "4005bdf5-3826-4214-9fa6-f794c4f043df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.089s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.216331] env[69227]: DEBUG nova.compute.manager [None req-8639c8ce-4b8f-4dd6-b267-2fa392fc0f70 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] [instance: 74713144-66f6-4513-bac5-379f4a1b1cd1] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1055.732901] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8639c8ce-4b8f-4dd6-b267-2fa392fc0f70 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Lock "74713144-66f6-4513-bac5-379f4a1b1cd1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.993s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.029114] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "5512573f-d965-4cb0-acfd-6ba248e4774f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.029114] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "5512573f-d965-4cb0-acfd-6ba248e4774f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.235615] env[69227]: DEBUG nova.compute.manager [None req-7ca913cd-0ae0-4f4c-80fd-8c3d4d7ad6e6 tempest-ServersNegativeTestMultiTenantJSON-1615902621 tempest-ServersNegativeTestMultiTenantJSON-1615902621-project-member] [instance: 43ec99d7-fc56-493f-b845-710027a320c6] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1056.741209] env[69227]: DEBUG nova.compute.manager [None req-7ca913cd-0ae0-4f4c-80fd-8c3d4d7ad6e6 tempest-ServersNegativeTestMultiTenantJSON-1615902621 tempest-ServersNegativeTestMultiTenantJSON-1615902621-project-member] [instance: 43ec99d7-fc56-493f-b845-710027a320c6] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1057.260681] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7ca913cd-0ae0-4f4c-80fd-8c3d4d7ad6e6 tempest-ServersNegativeTestMultiTenantJSON-1615902621 tempest-ServersNegativeTestMultiTenantJSON-1615902621-project-member] Lock "43ec99d7-fc56-493f-b845-710027a320c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.219s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.765560] env[69227]: DEBUG nova.compute.manager [None req-2e055c0f-8bcf-439d-83f7-a2736d4e10ed tempest-ImagesOneServerNegativeTestJSON-1775796980 tempest-ImagesOneServerNegativeTestJSON-1775796980-project-member] [instance: 5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1058.276769] env[69227]: DEBUG nova.compute.manager [None req-2e055c0f-8bcf-439d-83f7-a2736d4e10ed tempest-ImagesOneServerNegativeTestJSON-1775796980 tempest-ImagesOneServerNegativeTestJSON-1775796980-project-member] [instance: 5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1058.315566] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c865cf00-48bf-41d7-9c55-7c46e7991be5 tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] Acquiring lock "96086386-f894-4ce9-9fc0-ea710b5cca44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.315899] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c865cf00-48bf-41d7-9c55-7c46e7991be5 tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] Lock "96086386-f894-4ce9-9fc0-ea710b5cca44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.799504] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2e055c0f-8bcf-439d-83f7-a2736d4e10ed tempest-ImagesOneServerNegativeTestJSON-1775796980 tempest-ImagesOneServerNegativeTestJSON-1775796980-project-member] Lock "5f7343d5-0cff-4e2d-9ae2-8642fad2c5e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.639s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.862415] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b43390a-42a4-4311-ab7a-3304027a9b1a tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] Acquiring lock "aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.862644] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b43390a-42a4-4311-ab7a-3304027a9b1a tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] Lock "aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.305260] env[69227]: DEBUG nova.compute.manager [None req-ef622b4f-5fd9-456f-8c71-8d774207061f tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: de21ad29-1e75-44b6-b1d8-ba0e702a7fe2] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1059.807807] env[69227]: DEBUG nova.compute.manager [None req-ef622b4f-5fd9-456f-8c71-8d774207061f tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: de21ad29-1e75-44b6-b1d8-ba0e702a7fe2] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1060.322358] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ef622b4f-5fd9-456f-8c71-8d774207061f tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "de21ad29-1e75-44b6-b1d8-ba0e702a7fe2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 218.162s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.825555] env[69227]: DEBUG nova.compute.manager [None req-ca97291b-90aa-4a97-b6ae-f19979fc7637 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: e77cfa8f-4678-4fa0-9cc8-750895c85013] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1060.901407] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa7e9190-c269-420a-b8e2-49917857a4a4 tempest-InstanceActionsTestJSON-1348339765 tempest-InstanceActionsTestJSON-1348339765-project-member] Acquiring lock "a7f82862-e8aa-42f8-8bbe-38f068f0e6ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.901690] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa7e9190-c269-420a-b8e2-49917857a4a4 tempest-InstanceActionsTestJSON-1348339765 tempest-InstanceActionsTestJSON-1348339765-project-member] Lock "a7f82862-e8aa-42f8-8bbe-38f068f0e6ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.329670] env[69227]: DEBUG nova.compute.manager [None req-ca97291b-90aa-4a97-b6ae-f19979fc7637 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: e77cfa8f-4678-4fa0-9cc8-750895c85013] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1061.847952] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ca97291b-90aa-4a97-b6ae-f19979fc7637 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "e77cfa8f-4678-4fa0-9cc8-750895c85013" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 217.849s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.350766] env[69227]: DEBUG nova.compute.manager [None req-b6317fd3-ac53-462d-a029-9b11675c33f9 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: e04f79e6-e7af-4d94-aed0-3f6aecbd8806] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1062.858710] env[69227]: DEBUG nova.compute.manager [None req-b6317fd3-ac53-462d-a029-9b11675c33f9 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: e04f79e6-e7af-4d94-aed0-3f6aecbd8806] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1063.373125] env[69227]: DEBUG oslo_concurrency.lockutils [None req-b6317fd3-ac53-462d-a029-9b11675c33f9 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "e04f79e6-e7af-4d94-aed0-3f6aecbd8806" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 215.851s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.876337] env[69227]: DEBUG nova.compute.manager [None req-5082fdd3-3b3d-4bd8-86ff-8500608e97ab tempest-ServerActionsV293TestJSON-851954569 tempest-ServerActionsV293TestJSON-851954569-project-member] [instance: 940ca4b4-783b-4527-8559-d00d9e48fd05] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1064.379880] env[69227]: DEBUG nova.compute.manager [None req-5082fdd3-3b3d-4bd8-86ff-8500608e97ab tempest-ServerActionsV293TestJSON-851954569 tempest-ServerActionsV293TestJSON-851954569-project-member] [instance: 940ca4b4-783b-4527-8559-d00d9e48fd05] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1064.893984] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5082fdd3-3b3d-4bd8-86ff-8500608e97ab tempest-ServerActionsV293TestJSON-851954569 tempest-ServerActionsV293TestJSON-851954569-project-member] Lock "940ca4b4-783b-4527-8559-d00d9e48fd05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.214s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.396766] env[69227]: DEBUG nova.compute.manager [None req-485a3965-3038-4d9a-85ce-811160e1b584 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 4f6ce1c9-4790-4901-8462-1b24f52ef54e] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1065.900694] env[69227]: DEBUG nova.compute.manager [None req-485a3965-3038-4d9a-85ce-811160e1b584 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 4f6ce1c9-4790-4901-8462-1b24f52ef54e] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1066.413404] env[69227]: DEBUG oslo_concurrency.lockutils [None req-485a3965-3038-4d9a-85ce-811160e1b584 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "4f6ce1c9-4790-4901-8462-1b24f52ef54e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.349s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.916344] env[69227]: DEBUG nova.compute.manager [None req-e4ea5218-d305-4ebc-a4b8-f5eea393074c tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 532fb3f7-f71e-4906-bf1a-c15f9762c04a] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1067.420642] env[69227]: DEBUG nova.compute.manager [None req-e4ea5218-d305-4ebc-a4b8-f5eea393074c tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] [instance: 532fb3f7-f71e-4906-bf1a-c15f9762c04a] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1067.933550] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e4ea5218-d305-4ebc-a4b8-f5eea393074c tempest-ServersTestMultiNic-631920258 tempest-ServersTestMultiNic-631920258-project-member] Lock "532fb3f7-f71e-4906-bf1a-c15f9762c04a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 209.280s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.436831] env[69227]: DEBUG nova.compute.manager [None req-1120a5de-aae7-4d20-99db-888b38ad375a tempest-InstanceActionsV221TestJSON-1168477305 tempest-InstanceActionsV221TestJSON-1168477305-project-member] [instance: cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1068.941819] env[69227]: DEBUG nova.compute.manager [None req-1120a5de-aae7-4d20-99db-888b38ad375a tempest-InstanceActionsV221TestJSON-1168477305 tempest-InstanceActionsV221TestJSON-1168477305-project-member] [instance: cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1069.455919] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1120a5de-aae7-4d20-99db-888b38ad375a tempest-InstanceActionsV221TestJSON-1168477305 tempest-InstanceActionsV221TestJSON-1168477305-project-member] Lock "cabe5f5e-cf99-4c5d-b8e4-99070aa2b50e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.271s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.958142] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1070.481859] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.482163] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.483680] env[69227]: INFO nova.compute.claims [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.807922] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c4cc8b-fa8f-4d06-b26d-85ac4e0c3d2d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.815802] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b75dec-af60-4c43-a7c5-98be05dab506 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.844636] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3667ae5f-ff6d-4ae9-9a39-a0a0e7fb9042 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.851398] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f429ce5-e1c4-48fd-a1b9-730e41279ae4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.864151] env[69227]: DEBUG nova.compute.provider_tree [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.367019] env[69227]: DEBUG nova.scheduler.client.report [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1072.872157] env[69227]: DEBUG 
oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.872700] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1073.380048] env[69227]: DEBUG nova.compute.utils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1073.383842] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1073.385140] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1073.442778] env[69227]: DEBUG nova.policy [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd8bc936e55b94c59aa51596e0963dffd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '92616e4226c44225a5a509e9e6602cae', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1073.717464] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Successfully created port: 7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1073.889408] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1074.898392] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1074.924530] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1074.924776] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1074.924929] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1074.925126] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1074.925274] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1074.925421] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1074.925623] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1074.925776] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1074.925936] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1074.926105] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1074.926278] env[69227]: DEBUG nova.virt.hardware [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1074.927159] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c577baee-4ab0-42b6-b294-c4fb57e2de9a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.935131] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea37af4f-971a-433e-b2bb-4cfbcb9defcc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.178201] env[69227]: DEBUG nova.compute.manager [req-e72b849c-9156-4430-9279-b56428703d3e req-154c38ce-2e35-4092-b354-b05df7ff4f33 service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Received event network-vif-plugged-7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1075.178437] env[69227]: DEBUG oslo_concurrency.lockutils [req-e72b849c-9156-4430-9279-b56428703d3e req-154c38ce-2e35-4092-b354-b05df7ff4f33 service nova] Acquiring lock "43397ae2-14e8-495d-bdd9-54a14e6427e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.178644] env[69227]: DEBUG oslo_concurrency.lockutils [req-e72b849c-9156-4430-9279-b56428703d3e req-154c38ce-2e35-4092-b354-b05df7ff4f33 service nova] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.178810] env[69227]: DEBUG oslo_concurrency.lockutils [req-e72b849c-9156-4430-9279-b56428703d3e req-154c38ce-2e35-4092-b354-b05df7ff4f33 service nova] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.178974] env[69227]: DEBUG nova.compute.manager [req-e72b849c-9156-4430-9279-b56428703d3e req-154c38ce-2e35-4092-b354-b05df7ff4f33 service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] No waiting events found dispatching network-vif-plugged-7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1075.179610] env[69227]: WARNING nova.compute.manager [req-e72b849c-9156-4430-9279-b56428703d3e req-154c38ce-2e35-4092-b354-b05df7ff4f33 service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Received unexpected event network-vif-plugged-7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 for instance with vm_state building and task_state spawning. [ 1075.270994] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Successfully updated port: 7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1075.775510] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "refresh_cache-43397ae2-14e8-495d-bdd9-54a14e6427e9" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1075.775697] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired lock "refresh_cache-43397ae2-14e8-495d-bdd9-54a14e6427e9" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1075.775852] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1076.312044] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1076.448225] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Updating instance_info_cache with network_info: [{"id": "7e0a7325-db63-4aa8-85b5-9a5d6cfc3530", "address": "fa:16:3e:16:ac:ea", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0a7325-db", "ovs_interfaceid": "7e0a7325-db63-4aa8-85b5-9a5d6cfc3530", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1076.952014] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Releasing lock "refresh_cache-43397ae2-14e8-495d-bdd9-54a14e6427e9" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1076.952014] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance network_info: |[{"id": "7e0a7325-db63-4aa8-85b5-9a5d6cfc3530", "address": "fa:16:3e:16:ac:ea", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0a7325-db", "ovs_interfaceid": "7e0a7325-db63-4aa8-85b5-9a5d6cfc3530", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1076.952407] env[69227]: 
DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:ac:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7e0a7325-db63-4aa8-85b5-9a5d6cfc3530', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1076.960445] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1076.960645] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1076.960870] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f08d72ee-2970-43dd-b6d1-6de14138abce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.981701] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1076.981701] env[69227]: value = "task-3475052" [ 1076.981701] env[69227]: _type = "Task" [ 1076.981701] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.989079] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475052, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.206985] env[69227]: DEBUG nova.compute.manager [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Received event network-changed-7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1077.207278] env[69227]: DEBUG nova.compute.manager [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Refreshing instance network info cache due to event network-changed-7e0a7325-db63-4aa8-85b5-9a5d6cfc3530. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1077.207504] env[69227]: DEBUG oslo_concurrency.lockutils [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] Acquiring lock "refresh_cache-43397ae2-14e8-495d-bdd9-54a14e6427e9" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.207647] env[69227]: DEBUG oslo_concurrency.lockutils [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] Acquired lock "refresh_cache-43397ae2-14e8-495d-bdd9-54a14e6427e9" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.207869] env[69227]: DEBUG nova.network.neutron [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Refreshing network info cache for port 7e0a7325-db63-4aa8-85b5-9a5d6cfc3530 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1077.491920] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475052, 'name': CreateVM_Task, 'duration_secs': 0.284145} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.493056] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1077.493177] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1077.493381] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1077.493689] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1077.493922] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f9d3e99-0183-47e0-ba53-c514706e7915 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.498246] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 1077.498246] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52c6c231-38d1-db1c-8083-04eeea365af9" [ 1077.498246] env[69227]: _type = "Task" [ 1077.498246] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.505276] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52c6c231-38d1-db1c-8083-04eeea365af9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.890099] env[69227]: DEBUG nova.network.neutron [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Updated VIF entry in instance network info cache for port 7e0a7325-db63-4aa8-85b5-9a5d6cfc3530. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1077.890477] env[69227]: DEBUG nova.network.neutron [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Updating instance_info_cache with network_info: [{"id": "7e0a7325-db63-4aa8-85b5-9a5d6cfc3530", "address": "fa:16:3e:16:ac:ea", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.59", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7e0a7325-db", "ovs_interfaceid": "7e0a7325-db63-4aa8-85b5-9a5d6cfc3530", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.008600] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1078.008710] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1078.008907] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1078.393832] env[69227]: DEBUG oslo_concurrency.lockutils [req-ba8dfae9-0ffe-4821-beb9-8d2e844152c1 req-dcb59784-7400-401b-a85a-d5703b84ddfa service nova] Releasing lock "refresh_cache-43397ae2-14e8-495d-bdd9-54a14e6427e9" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.939765] env[69227]: WARNING oslo_vmware.rw_handles [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1085.939765] env[69227]: ERROR oslo_vmware.rw_handles [ 1085.940593] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1085.942372] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1085.942623] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Copying Virtual Disk [datastore2] vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/8a7ad27f-5d69-47c6-b3d6-5e7e5b7773cf/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1085.942907] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b325ebd9-1d29-45ac-8c15-5dede16ee32b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.950059] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 1085.950059] env[69227]: value = "task-3475053" [ 1085.950059] env[69227]: _type = "Task" [ 1085.950059] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.958285] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': task-3475053, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.460474] env[69227]: DEBUG oslo_vmware.exceptions [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1086.460786] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.461382] env[69227]: ERROR nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1086.461382] env[69227]: Faults: ['InvalidArgument'] [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Traceback (most recent call last): [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] yield resources [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self.driver.spawn(context, instance, image_meta, [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1086.461382] env[69227]: ERROR 
nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self._fetch_image_if_missing(context, vi) [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] image_cache(vi, tmp_image_ds_loc) [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] vm_util.copy_virtual_disk( [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] session._wait_for_task(vmdk_copy_task) [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] return self.wait_for_task(task_ref) [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] return evt.wait() [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] result = hub.switch() [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] return self.greenlet.switch() [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self.f(*self.args, **self.kw) [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] raise exceptions.translate_fault(task_info.error) [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Faults: ['InvalidArgument'] [ 1086.461382] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] [ 1086.462457] 
env[69227]: INFO nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Terminating instance [ 1086.463252] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.463462] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.464094] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1086.464286] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1086.464512] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59e5a527-3389-41ae-970a-214af55d4bf8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.466952] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aac0a85-365f-4444-8cb3-7ef28dc293e9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.474582] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1086.474582] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7584391c-9e90-41e5-b5d8-aa3f31730585 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.476671] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.476846] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1086.477836] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df529236-6fc8-426a-9723-0b0ecb3f1efa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.482467] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Waiting for the task: (returnval){ [ 1086.482467] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52245fc9-2db7-29db-236c-eec15dab738d" [ 1086.482467] env[69227]: _type = "Task" [ 1086.482467] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.491281] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52245fc9-2db7-29db-236c-eec15dab738d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.541739] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1086.541836] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1086.542034] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Deleting the datastore file [datastore2] 02ec5165-3b99-4d81-a7d9-716e63076cb0 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.542312] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72cabab5-d98e-448c-9d7b-7243216072b4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.548995] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 1086.548995] env[69227]: value = "task-3475055" [ 1086.548995] env[69227]: _type = "Task" [ 1086.548995] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.556867] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': task-3475055, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.992649] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1086.993014] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Creating directory with path [datastore2] vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.993238] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c45d920-a5d4-462e-bea2-182cecf865a0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.005887] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Created directory with path [datastore2] vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.006109] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Fetch image to [datastore2] vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1087.006283] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1087.007033] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd31ef3-1c1c-48e7-9471-09a73f5cfb5b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.014320] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecad6c48-2eac-4323-8566-dcae15f24254 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.023957] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c74d9d5-81fa-458b-81cb-95bc06dc5ad2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.061942] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e5c77010-0206-4e8d-afc6-88f214519d5c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.068247] env[69227]: DEBUG oslo_vmware.api [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': task-3475055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.066597} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.069610] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1087.069808] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1087.069980] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1087.070169] env[69227]: INFO nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Took 0.61 seconds to destroy the instance on the hypervisor. 
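The repeated "Waiting for the task: (returnval){ value = ... _type = "Task" }", "progress is 0%" and "completed successfully ... duration_secs" entries throughout this excerpt all come from the same pattern: a vCenter *_Task method is invoked, the task's info is polled until it reaches a terminal state, and an error state is translated into an exception (the exact path visible in the traceback further down). The following is a minimal, self-contained sketch of that polling loop; TaskInfo and get_task_info are illustrative stand-ins for the vSphere task info object, not oslo.vmware's actual classes.

import time
from dataclasses import dataclass

# Illustrative stand-in for a vSphere TaskInfo; not an oslo.vmware class.
@dataclass
class TaskInfo:
    state: str              # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: str = ''
    result: object = None

def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info: callable returning the task's current TaskInfo, analogous
    # to reading the task's 'info' property through the PropertyCollector.
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state in ('queued', 'running'):
            # corresponds to the "progress is N%" lines logged by _poll_task
            print('progress is %d%%' % info.progress)
            time.sleep(poll_interval)
            continue
        if info.state == 'success':
            # corresponds to "completed successfully" plus duration_secs
            print('completed in %.6fs' % (time.monotonic() - start))
            return info.result
        # state == 'error': here oslo.vmware raises the translated fault instead
        raise RuntimeError('task failed: %s' % info.error)

Driving it with a stub that returns 'running' a couple of times and then 'success' reproduces the progress/duration lines seen in the entries above.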
[ 1087.071884] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d68566c5-7280-4ecf-90df-2e8fa5faa7db {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.073702] env[69227]: DEBUG nova.compute.claims [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1087.073886] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.074148] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.094445] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1087.145129] env[69227]: DEBUG oslo_vmware.rw_handles [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1087.206548] env[69227]: DEBUG oslo_vmware.rw_handles [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1087.206802] env[69227]: DEBUG oslo_vmware.rw_handles [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1087.887487] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53616a8-7f06-45e4-86f3-561c94ee4ca7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.894927] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89eaa862-f9d7-4c34-b322-2d7796ed4ecd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.925209] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396bb532-daa4-4db8-b3c8-d4f31135c5bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.932400] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef7ea690-3ad6-49f0-9250-daf9ce2e99c3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.945588] env[69227]: DEBUG nova.compute.provider_tree [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1088.449371] env[69227]: DEBUG nova.scheduler.client.report [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1088.954909] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.881s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.955515] env[69227]: ERROR nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1088.955515] env[69227]: Faults: ['InvalidArgument'] [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Traceback (most recent call last): [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1088.955515] env[69227]: ERROR nova.compute.manager 
[instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self.driver.spawn(context, instance, image_meta, [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self._fetch_image_if_missing(context, vi) [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] image_cache(vi, tmp_image_ds_loc) [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] vm_util.copy_virtual_disk( [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] session._wait_for_task(vmdk_copy_task) [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] return self.wait_for_task(task_ref) [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] return evt.wait() [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] result = hub.switch() [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] return self.greenlet.switch() [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] self.f(*self.args, **self.kw) [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] raise exceptions.translate_fault(task_info.error) [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Faults: ['InvalidArgument'] [ 1088.955515] env[69227]: ERROR nova.compute.manager [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] [ 1088.956441] env[69227]: DEBUG nova.compute.utils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1088.957829] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Build of instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 was re-scheduled: A specified parameter was not correct: fileType [ 1088.957829] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1088.958209] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1088.958370] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1088.958518] env[69227]: DEBUG nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1088.958676] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1089.684589] env[69227]: DEBUG nova.network.neutron [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.187588] env[69227]: INFO nova.compute.manager [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Took 1.23 seconds to deallocate network for instance. [ 1091.218834] env[69227]: INFO nova.scheduler.client.report [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Deleted allocations for instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 [ 1091.427801] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1091.726796] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1e4f470e-16d4-4655-909c-c4f042a52d64 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 483.181s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.727863] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 284.836s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.728102] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "02ec5165-3b99-4d81-a7d9-716e63076cb0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.728327] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b95d0db-d073-462e-8feb-08763466fa57 
tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.728532] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.730230] env[69227]: INFO nova.compute.manager [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Terminating instance [ 1091.731872] env[69227]: DEBUG nova.compute.manager [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1091.732074] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1091.732325] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4bceecf-db4a-4fcd-90ac-f8a4bc4e1733 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.741612] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e863597-cdaf-4f9d-9ca1-e1ddd4cba1f1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.769756] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 02ec5165-3b99-4d81-a7d9-716e63076cb0 could not be found. [ 1091.769942] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1091.770134] env[69227]: INFO nova.compute.manager [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Took 0.04 seconds to destroy the instance on the hypervisor. 
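The traceback a few entries above shows how the first build failed: _fetch_image_if_missing calls _cache_sparse_image, which waits on a CopyVirtualDisk task; the poller hits an error state and translate_fault() raises VimFaultException with the VIM fault name InvalidArgument ("A specified parameter was not correct: fileType"), after which the compute manager aborts the claim and the build is re-scheduled. The sketch below shows one way a caller might surface those fault names around wait_for_task; the session and task objects are placeholders, and it assumes oslo_vmware.exceptions.VimFaultException carries the names in fault_list, which is what the "Faults: ['InvalidArgument']" lines above are printing.

from oslo_vmware import exceptions as vexc

def wait_and_report(session, task_ref):
    # session: an oslo_vmware.api.VMwareAPISession-like object exposing
    # wait_for_task(); task_ref: the task returned by a *_Task call.
    # Both are placeholders for illustration, not Nova's own helpers.
    try:
        return session.wait_for_task(task_ref)
    except vexc.VimFaultException as e:
        # e.fault_list is assumed to hold the VIM fault names, e.g.
        # ['InvalidArgument'] for the fileType error above; Nova lets the
        # exception propagate so the build can be rescheduled.
        print('VIM faults: %s' % e.fault_list)
        raise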
[ 1091.770370] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1091.770620] env[69227]: DEBUG nova.compute.manager [-] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1091.770719] env[69227]: DEBUG nova.network.neutron [-] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1092.231456] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1092.289307] env[69227]: DEBUG nova.network.neutron [-] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.755429] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.755790] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.757270] env[69227]: INFO nova.compute.claims [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1092.791615] env[69227]: INFO nova.compute.manager [-] [instance: 02ec5165-3b99-4d81-a7d9-716e63076cb0] Took 1.02 seconds to deallocate network for instance. 
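The "Inventory has not changed for provider ... based on inventory data: {...}" records (one earlier in this excerpt and another just below) are what the claim logged above is checked against. Placement treats the usable capacity of a resource class as (total - reserved) * allocation_ratio; a quick worked example with the logged figures, as a plain Python sketch:

def usable_capacity(total, reserved, allocation_ratio):
    # Placement's effective capacity for one resource class.
    return (total - reserved) * allocation_ratio

# Figures from the inventory records logged for provider
# 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b:
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    print(rc, usable_capacity(inv['total'], inv['reserved'], inv['allocation_ratio']))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

min_unit, max_unit and step_size from the same records only constrain the size of a single allocation; they do not change the totals above.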
[ 1093.814656] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b95d0db-d073-462e-8feb-08763466fa57 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "02ec5165-3b99-4d81-a7d9-716e63076cb0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.087s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.045155] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dae84cb5-469b-47c7-a37c-31efdcfa840d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.052302] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12a8cbe-8231-4a99-ba50-2b4c5a4b84fb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.082595] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a031a0-cad8-4f86-8c93-0245088152b2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.090041] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb26ce65-f924-47be-af6a-0f45d544d2a7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.102823] env[69227]: DEBUG nova.compute.provider_tree [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.427147] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1094.606759] env[69227]: DEBUG nova.scheduler.client.report [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1095.111915] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.112422] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 
tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1095.617200] env[69227]: DEBUG nova.compute.utils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1095.618577] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1095.618898] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1095.667460] env[69227]: DEBUG nova.policy [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38cc6f8efd4b4424bfee964903f22428', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5865abc9adfe46d2bfb03ef3bd9ed3a0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1096.122682] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1096.168117] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Successfully created port: 8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.427090] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.929668] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.929873] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.930058] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.930219] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1096.931264] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730a4ccc-54dc-4a0e-892e-8df050e93b1f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.939584] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46efa8b8-c38b-4e97-8339-9533231de02a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.953146] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702eb92d-a17b-4978-b21a-647ea48c22ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.959015] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a69a23-541b-43d4-bf2e-05536ebfec8e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.988157] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180912MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1096.988311] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.988548] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.131691] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1097.156518] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1097.156758] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1097.157054] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.157288] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1097.157436] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.157611] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 
tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1097.157832] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1097.157989] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1097.158171] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1097.158336] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1097.158522] env[69227]: DEBUG nova.virt.hardware [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1097.159371] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e22659f-7761-4186-a9d5-fac2e33a4e9e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.167162] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe605624-a15d-4a6e-9a33-efee65215483 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.696621] env[69227]: DEBUG nova.compute.manager [req-a7c26fdf-4d1e-4b5d-8a44-45edeebd6126 req-ded1cc4d-4d21-491b-a8b5-d61d4f11fefd service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Received event network-vif-plugged-8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1097.696846] env[69227]: DEBUG oslo_concurrency.lockutils [req-a7c26fdf-4d1e-4b5d-8a44-45edeebd6126 req-ded1cc4d-4d21-491b-a8b5-d61d4f11fefd service nova] Acquiring lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.697701] env[69227]: DEBUG oslo_concurrency.lockutils [req-a7c26fdf-4d1e-4b5d-8a44-45edeebd6126 req-ded1cc4d-4d21-491b-a8b5-d61d4f11fefd service nova] Lock 
"12393e1f-9cb4-4d54-b485-ddc70c65ac47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.698057] env[69227]: DEBUG oslo_concurrency.lockutils [req-a7c26fdf-4d1e-4b5d-8a44-45edeebd6126 req-ded1cc4d-4d21-491b-a8b5-d61d4f11fefd service nova] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.698300] env[69227]: DEBUG nova.compute.manager [req-a7c26fdf-4d1e-4b5d-8a44-45edeebd6126 req-ded1cc4d-4d21-491b-a8b5-d61d4f11fefd service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] No waiting events found dispatching network-vif-plugged-8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1097.698405] env[69227]: WARNING nova.compute.manager [req-a7c26fdf-4d1e-4b5d-8a44-45edeebd6126 req-ded1cc4d-4d21-491b-a8b5-d61d4f11fefd service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Received unexpected event network-vif-plugged-8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 for instance with vm_state building and task_state spawning. [ 1097.758195] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Successfully updated port: 8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.020077] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 334575bf-5847-41d5-85bd-e72f08a80a59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.020233] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.020416] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.020613] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.020780] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.020927] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.021061] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.021176] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.021290] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.021395] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1098.260548] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "refresh_cache-12393e1f-9cb4-4d54-b485-ddc70c65ac47" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.260842] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquired lock "refresh_cache-12393e1f-9cb4-4d54-b485-ddc70c65ac47" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.260882] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1098.525823] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0da7dce2-7016-4020-8b9c-15b1e2f5f349 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1098.797729] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1098.947273] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Updating instance_info_cache with network_info: [{"id": "8c2a1bb5-9503-4179-8c0f-2991fb8b9d28", "address": "fa:16:3e:fb:cc:8e", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c2a1bb5-95", "ovs_interfaceid": "8c2a1bb5-9503-4179-8c0f-2991fb8b9d28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.029472] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.400327] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.449548] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Releasing lock "refresh_cache-12393e1f-9cb4-4d54-b485-ddc70c65ac47" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.449891] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance network_info: |[{"id": "8c2a1bb5-9503-4179-8c0f-2991fb8b9d28", "address": "fa:16:3e:fb:cc:8e", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c2a1bb5-95", "ovs_interfaceid": "8c2a1bb5-9503-4179-8c0f-2991fb8b9d28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1099.450344] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:cc:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c2a1bb5-9503-4179-8c0f-2991fb8b9d28', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1099.457704] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Creating folder: Project (5865abc9adfe46d2bfb03ef3bd9ed3a0). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1099.457961] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f11e3b7d-7601-4e99-83a8-40f8841624c2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.468525] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Created folder: Project (5865abc9adfe46d2bfb03ef3bd9ed3a0) in parent group-v694623. [ 1099.468707] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Creating folder: Instances. Parent ref: group-v694680. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1099.468927] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b043313-7410-405d-9ea2-32cf82fb952b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.478107] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Created folder: Instances in parent group-v694680. [ 1099.478314] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1099.478493] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1099.478678] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-14f4d362-9b25-460a-b116-80312ec55553 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.496094] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1099.496094] env[69227]: value = "task-3475058" [ 1099.496094] env[69227]: _type = "Task" [ 1099.496094] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.506168] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475058, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.535974] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fda81689-7e25-4d08-b0f4-58df21bb2a19 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1099.724902] env[69227]: DEBUG nova.compute.manager [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Received event network-changed-8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1099.725219] env[69227]: DEBUG nova.compute.manager [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Refreshing instance network info cache due to event network-changed-8c2a1bb5-9503-4179-8c0f-2991fb8b9d28. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1099.725446] env[69227]: DEBUG oslo_concurrency.lockutils [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] Acquiring lock "refresh_cache-12393e1f-9cb4-4d54-b485-ddc70c65ac47" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.725595] env[69227]: DEBUG oslo_concurrency.lockutils [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] Acquired lock "refresh_cache-12393e1f-9cb4-4d54-b485-ddc70c65ac47" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.725842] env[69227]: DEBUG nova.network.neutron [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Refreshing network info cache for port 8c2a1bb5-9503-4179-8c0f-2991fb8b9d28 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1100.005658] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475058, 'name': CreateVM_Task, 'duration_secs': 0.290523} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.005824] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1100.006505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.006756] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1100.007105] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1100.007305] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6637336e-3a50-449f-b623-584efc2c0ffe {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.014025] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Waiting for the task: (returnval){ [ 1100.014025] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52c69bcb-0227-bfde-5569-748839e33b78" [ 1100.014025] env[69227]: _type = "Task" [ 1100.014025] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.019488] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52c69bcb-0227-bfde-5569-748839e33b78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.038330] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1100.460261] env[69227]: DEBUG nova.network.neutron [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Updated VIF entry in instance network info cache for port 8c2a1bb5-9503-4179-8c0f-2991fb8b9d28. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1100.460672] env[69227]: DEBUG nova.network.neutron [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Updating instance_info_cache with network_info: [{"id": "8c2a1bb5-9503-4179-8c0f-2991fb8b9d28", "address": "fa:16:3e:fb:cc:8e", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.131", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c2a1bb5-95", "ovs_interfaceid": "8c2a1bb5-9503-4179-8c0f-2991fb8b9d28", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.522343] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.522529] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1100.522741] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.540899] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f9e2c26e-20e3-4333-8437-53dd42d1a1e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1100.963751] env[69227]: DEBUG oslo_concurrency.lockutils [req-fd21dedf-03fc-480a-ac32-e446deab94e3 req-8e4b3bf5-d5c4-4e3c-8f98-68c4431b1bb9 service nova] Releasing lock "refresh_cache-12393e1f-9cb4-4d54-b485-ddc70c65ac47" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.043874] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1101.546610] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e5db927a-625f-49c5-8f82-041550fcac67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1102.049510] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 74c8d4ad-fa82-488d-aad6-b952061ef2c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1102.553073] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2b465fda-e6e8-473a-b17e-e5de876c171d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.056553] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1103.559609] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 50908bce-98db-4f89-b4e2-81e059044088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.065830] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1104.570463] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance db33565c-80fa-419c-8f46-bb38e6b7e7ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.074247] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5512573f-d965-4cb0-acfd-6ba248e4774f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1105.577322] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 96086386-f894-4ce9-9fc0-ea710b5cca44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.080973] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.584012] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7f82862-e8aa-42f8-8bbe-38f068f0e6ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1106.584406] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1106.584459] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1106.927822] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c69cc4-cdba-478d-bfe5-a3faf4cfa371 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.935361] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfe224c-8a38-4307-a350-cf3fdfb88ae7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.965425] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea4b9b3-e9ed-4704-bb8a-363a7c8aff40 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.972752] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c29fd0-8384-4cfc-8339-12e5f77751df {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.986181] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.489067] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1107.994519] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1107.994867] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.006s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.990596] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.990988] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1108.991178] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1108.991303] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1109.495504] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.495779] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.495822] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.495951] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496088] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496209] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496324] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496440] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496555] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496678] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1109.496793] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1109.496967] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.497643] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.497643] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.497643] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.497643] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1110.244691] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.837031] env[69227]: WARNING oslo_vmware.rw_handles [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1134.837031] env[69227]: ERROR oslo_vmware.rw_handles [ 1134.837744] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1134.839668] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1134.839919] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Copying Virtual Disk [datastore2] vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/8bc43b45-089d-4696-8a9a-a24407379f9c/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1134.840235] env[69227]: 
DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7676f1a3-f659-4952-9f70-2d3bd0d7b8ed {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.848101] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Waiting for the task: (returnval){ [ 1134.848101] env[69227]: value = "task-3475059" [ 1134.848101] env[69227]: _type = "Task" [ 1134.848101] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.857329] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Task: {'id': task-3475059, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.013442] env[69227]: DEBUG oslo_vmware.exceptions [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1136.013442] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1136.013924] env[69227]: ERROR nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.013924] env[69227]: Faults: ['InvalidArgument'] [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Traceback (most recent call last): [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] yield resources [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self.driver.spawn(context, instance, image_meta, [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self._fetch_image_if_missing(context, vi) [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] image_cache(vi, tmp_image_ds_loc) [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] vm_util.copy_virtual_disk( [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] session._wait_for_task(vmdk_copy_task) [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] return self.wait_for_task(task_ref) [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] return evt.wait() [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] result = hub.switch() [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] return self.greenlet.switch() [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self.f(*self.args, **self.kw) [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] raise exceptions.translate_fault(task_info.error) [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1136.013924] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Faults: ['InvalidArgument'] [ 1136.013924] 
env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] [ 1136.019211] env[69227]: INFO nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Terminating instance [ 1136.019211] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1136.019211] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1136.019211] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.019211] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.019211] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f51d5a-9d55-4d86-86ea-772b246169d9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.020670] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1e98631-a5fd-4a3c-937d-3f698a0c3e79 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.026928] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1136.027151] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-daad209f-01e6-47d8-a080-7c88f4a60b4b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.029257] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.029406] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 
tempest-ServerDiagnosticsTest-627758463-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1136.030319] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74bd7950-9903-4881-ad4e-d86ce01a6193 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.034873] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Waiting for the task: (returnval){ [ 1136.034873] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b4d36d-8fe5-d181-25fe-19115a1dad21" [ 1136.034873] env[69227]: _type = "Task" [ 1136.034873] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.041909] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b4d36d-8fe5-d181-25fe-19115a1dad21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.093356] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1136.093571] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1136.093752] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Deleting the datastore file [datastore2] 334575bf-5847-41d5-85bd-e72f08a80a59 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1136.094028] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34fd3112-abc3-4366-81ab-7bf6776d588c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.099712] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Waiting for the task: (returnval){ [ 1136.099712] env[69227]: value = "task-3475061" [ 1136.099712] env[69227]: _type = "Task" [ 1136.099712] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.107580] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Task: {'id': task-3475061, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.545039] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1136.545330] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Creating directory with path [datastore2] vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1136.545564] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e355c1d8-08f8-444f-8c39-379418812383 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.556673] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Created directory with path [datastore2] vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1136.556868] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Fetch image to [datastore2] vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1136.557054] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1136.557793] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c2fb14-c843-47cf-a840-c69e76aafb5a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.564068] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e093173e-f968-469a-b9b8-637ea8220f1c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.572564] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb0e2e1-bf29-418e-b0d4-166b452ec168 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.604489] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88a5e1a9-aee2-42e2-a83c-23172faf99d1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.610812] env[69227]: DEBUG oslo_vmware.api [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Task: {'id': task-3475061, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076434} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.612197] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1136.612394] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1136.612563] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1136.612729] env[69227]: INFO nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Took 0.60 seconds to destroy the instance on the hypervisor. 
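
Editor's note: the records above (CreateVM_Task, CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task) all follow the same pattern: a vCenter method is invoked over SOAP, a task reference is returned, and the session polls it until it reports success or an error that is translated into a fault such as the InvalidArgument/fileType failure seen earlier. The snippet below is a minimal, hypothetical sketch of that polling loop only; get_task_info, invoke-side plumbing, and TaskFault are illustrative stand-ins and not the actual oslo.vmware or Nova API.

    # Hypothetical sketch of the "invoke task, poll until done" pattern in the log above.
    import time

    class TaskFault(Exception):
        """Raised when the polled task reports an error (e.g. InvalidArgument)."""

    def wait_for_task(get_task_info, task_ref, interval=0.5):
        """Poll a vCenter task reference until it succeeds or fails.

        get_task_info is assumed to be a callable returning a dict like
        {'state': 'running', 'progress': 0} for the given task reference.
        """
        while True:
            info = get_task_info(task_ref)
            if info['state'] == 'success':
                return info                        # log: "... completed successfully."
            if info['state'] == 'error':
                raise TaskFault(info.get('error')) # log: "A specified parameter was not correct"
            # log: "Task: {...} progress is 0%."
            time.sleep(interval)
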
[ 1136.614423] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-65412cd4-8089-48ec-8f2b-67b0f90e4643 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.616208] env[69227]: DEBUG nova.compute.claims [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1136.616380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.616608] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.640928] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1136.689215] env[69227]: DEBUG oslo_vmware.rw_handles [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1136.749042] env[69227]: DEBUG oslo_vmware.rw_handles [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1136.749269] env[69227]: DEBUG oslo_vmware.rw_handles [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1137.407803] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a930de13-f11d-4a0a-a8ed-341941711c48 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.415244] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f779c828-a8f1-4475-9fdd-cae78c6dfc76 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.444369] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4cc066-6b88-404a-a6b4-ed10a6ca2d74 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.450742] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77653c3-ee5f-44b1-879e-172ffb9bd0ad {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.463097] env[69227]: DEBUG nova.compute.provider_tree [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.965846] env[69227]: DEBUG nova.scheduler.client.report [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1138.471519] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.855s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.472118] env[69227]: ERROR nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1138.472118] env[69227]: Faults: ['InvalidArgument'] [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Traceback (most recent call last): [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1138.472118] 
env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self.driver.spawn(context, instance, image_meta, [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self._fetch_image_if_missing(context, vi) [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] image_cache(vi, tmp_image_ds_loc) [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] vm_util.copy_virtual_disk( [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] session._wait_for_task(vmdk_copy_task) [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] return self.wait_for_task(task_ref) [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] return evt.wait() [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] result = hub.switch() [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] return self.greenlet.switch() [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] self.f(*self.args, **self.kw) [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] raise exceptions.translate_fault(task_info.error) [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Faults: ['InvalidArgument'] [ 1138.472118] env[69227]: ERROR nova.compute.manager [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] [ 1138.473064] env[69227]: DEBUG nova.compute.utils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1138.474785] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Build of instance 334575bf-5847-41d5-85bd-e72f08a80a59 was re-scheduled: A specified parameter was not correct: fileType [ 1138.474785] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1138.475168] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1138.475340] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1138.475506] env[69227]: DEBUG nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1138.475662] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1139.218420] env[69227]: DEBUG nova.network.neutron [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.722199] env[69227]: INFO nova.compute.manager [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Took 1.25 seconds to deallocate network for instance. [ 1140.759321] env[69227]: INFO nova.scheduler.client.report [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Deleted allocations for instance 334575bf-5847-41d5-85bd-e72f08a80a59 [ 1141.268044] env[69227]: DEBUG oslo_concurrency.lockutils [None req-274a2f0c-d5f9-4d1d-a401-b3a576a48b7d tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "334575bf-5847-41d5-85bd-e72f08a80a59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 532.434s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.269722] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "334575bf-5847-41d5-85bd-e72f08a80a59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 334.815s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.270404] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Acquiring lock "334575bf-5847-41d5-85bd-e72f08a80a59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.270404] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "334575bf-5847-41d5-85bd-e72f08a80a59-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.270404] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "334575bf-5847-41d5-85bd-e72f08a80a59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.273808] env[69227]: INFO nova.compute.manager [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Terminating instance [ 1141.275630] env[69227]: DEBUG nova.compute.manager [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1141.275830] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1141.276828] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-522fc201-1949-4e6e-97ac-ee575c504f1e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.285954] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501ea94a-b620-48d4-b18b-175abb7ca224 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.313838] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 334575bf-5847-41d5-85bd-e72f08a80a59 could not be found. [ 1141.314062] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1141.314251] env[69227]: INFO nova.compute.manager [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1141.314488] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1141.314793] env[69227]: DEBUG nova.compute.manager [-] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1141.314911] env[69227]: DEBUG nova.network.neutron [-] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1141.773244] env[69227]: DEBUG nova.compute.manager [None req-cdc9a9cd-5716-445d-a320-f1f0ee03916b tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 0da7dce2-7016-4020-8b9c-15b1e2f5f349] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1141.832743] env[69227]: DEBUG nova.network.neutron [-] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.277519] env[69227]: DEBUG nova.compute.manager [None req-cdc9a9cd-5716-445d-a320-f1f0ee03916b tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 0da7dce2-7016-4020-8b9c-15b1e2f5f349] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1142.335601] env[69227]: INFO nova.compute.manager [-] [instance: 334575bf-5847-41d5-85bd-e72f08a80a59] Took 1.02 seconds to deallocate network for instance. [ 1142.791721] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cdc9a9cd-5716-445d-a320-f1f0ee03916b tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "0da7dce2-7016-4020-8b9c-15b1e2f5f349" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 228.522s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.294375] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1143.359408] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f740c118-7525-4b2a-9ad7-8de3d31d3284 tempest-ServersTestFqdnHostnames-288322231 tempest-ServersTestFqdnHostnames-288322231-project-member] Lock "334575bf-5847-41d5-85bd-e72f08a80a59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.090s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.815964] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.816274] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.817842] env[69227]: INFO nova.compute.claims [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1145.093615] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de469e21-ecb2-4f74-b389-2d36f2405d1c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.101190] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc391e51-0c62-4358-8c10-1eecfb86399f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.130535] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11c6ec6-4b75-47a4-846f-9ff6d00c0ca1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.136991] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1426b201-8747-4897-ba35-4eada3de2cd2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.150211] env[69227]: DEBUG nova.compute.provider_tree [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.654097] env[69227]: DEBUG nova.scheduler.client.report [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1146.160298] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.160835] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1146.667907] env[69227]: DEBUG nova.compute.utils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1146.668129] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1146.668165] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1146.717182] env[69227]: DEBUG nova.policy [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dbdc1a361ec24e4284d1d1d35d77a11f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b44af828f3fb42d09eda71f30ed171af', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1147.043568] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Successfully created port: 4d016659-3526-40d9-a96f-eba219acf3a3 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1147.171742] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 
859d632d-fb95-4ac6-9219-8768191979a5] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1148.180945] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1148.209031] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1148.209467] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1148.209467] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1148.209539] env[69227]: DEBUG nova.virt.hardware [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1148.210483] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e1189c-7776-4504-b83f-3a2e0496a0dd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.219245] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccf0964-c7de-45b3-8709-32f45765e939 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.509959] env[69227]: DEBUG nova.compute.manager [req-e03fbf47-12ef-45fc-9167-7ff0678793c1 req-f3d25a72-10b2-4f9a-8cf2-ef0baf82d9c4 service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Received event network-vif-plugged-4d016659-3526-40d9-a96f-eba219acf3a3 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1148.510280] env[69227]: DEBUG oslo_concurrency.lockutils [req-e03fbf47-12ef-45fc-9167-7ff0678793c1 req-f3d25a72-10b2-4f9a-8cf2-ef0baf82d9c4 service nova] Acquiring lock "859d632d-fb95-4ac6-9219-8768191979a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.510554] env[69227]: DEBUG oslo_concurrency.lockutils [req-e03fbf47-12ef-45fc-9167-7ff0678793c1 req-f3d25a72-10b2-4f9a-8cf2-ef0baf82d9c4 service nova] Lock "859d632d-fb95-4ac6-9219-8768191979a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.510774] env[69227]: DEBUG oslo_concurrency.lockutils [req-e03fbf47-12ef-45fc-9167-7ff0678793c1 req-f3d25a72-10b2-4f9a-8cf2-ef0baf82d9c4 service nova] Lock "859d632d-fb95-4ac6-9219-8768191979a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.510974] 
env[69227]: DEBUG nova.compute.manager [req-e03fbf47-12ef-45fc-9167-7ff0678793c1 req-f3d25a72-10b2-4f9a-8cf2-ef0baf82d9c4 service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] No waiting events found dispatching network-vif-plugged-4d016659-3526-40d9-a96f-eba219acf3a3 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1148.511322] env[69227]: WARNING nova.compute.manager [req-e03fbf47-12ef-45fc-9167-7ff0678793c1 req-f3d25a72-10b2-4f9a-8cf2-ef0baf82d9c4 service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Received unexpected event network-vif-plugged-4d016659-3526-40d9-a96f-eba219acf3a3 for instance with vm_state building and task_state spawning. [ 1148.594617] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Successfully updated port: 4d016659-3526-40d9-a96f-eba219acf3a3 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1149.097300] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "refresh_cache-859d632d-fb95-4ac6-9219-8768191979a5" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.097457] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquired lock "refresh_cache-859d632d-fb95-4ac6-9219-8768191979a5" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.097609] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1149.628746] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1149.794073] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Updating instance_info_cache with network_info: [{"id": "4d016659-3526-40d9-a96f-eba219acf3a3", "address": "fa:16:3e:70:32:dc", "network": {"id": "3cada74f-8ddb-4455-8921-161a8efc0b60", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-208328684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b44af828f3fb42d09eda71f30ed171af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d016659-35", "ovs_interfaceid": "4d016659-3526-40d9-a96f-eba219acf3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.296499] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Releasing lock "refresh_cache-859d632d-fb95-4ac6-9219-8768191979a5" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.296909] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance network_info: |[{"id": "4d016659-3526-40d9-a96f-eba219acf3a3", "address": "fa:16:3e:70:32:dc", "network": {"id": "3cada74f-8ddb-4455-8921-161a8efc0b60", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-208328684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b44af828f3fb42d09eda71f30ed171af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d016659-35", "ovs_interfaceid": "4d016659-3526-40d9-a96f-eba219acf3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1150.297348] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:32:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3e51ebca-e0f8-4b77-b155-4ff928eef130', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4d016659-3526-40d9-a96f-eba219acf3a3', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1150.304636] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Creating folder: Project (b44af828f3fb42d09eda71f30ed171af). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1150.304901] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d24ec8e4-180c-4d6c-a5aa-80cd381d5169 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.315150] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Created folder: Project (b44af828f3fb42d09eda71f30ed171af) in parent group-v694623. [ 1150.315325] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Creating folder: Instances. Parent ref: group-v694683. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1150.315540] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f0e1a0be-0862-4c15-9dd2-423e6a292f7d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.323659] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Created folder: Instances in parent group-v694683. [ 1150.323879] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1150.324065] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1150.324254] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef09e2f6-7908-433e-84b3-a94bddf14019 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.341895] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1150.341895] env[69227]: value = "task-3475064" [ 1150.341895] env[69227]: _type = "Task" [ 1150.341895] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.348816] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475064, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.540530] env[69227]: DEBUG nova.compute.manager [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Received event network-changed-4d016659-3526-40d9-a96f-eba219acf3a3 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1150.540733] env[69227]: DEBUG nova.compute.manager [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Refreshing instance network info cache due to event network-changed-4d016659-3526-40d9-a96f-eba219acf3a3. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1150.540945] env[69227]: DEBUG oslo_concurrency.lockutils [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] Acquiring lock "refresh_cache-859d632d-fb95-4ac6-9219-8768191979a5" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.541104] env[69227]: DEBUG oslo_concurrency.lockutils [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] Acquired lock "refresh_cache-859d632d-fb95-4ac6-9219-8768191979a5" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.541355] env[69227]: DEBUG nova.network.neutron [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Refreshing network info cache for port 4d016659-3526-40d9-a96f-eba219acf3a3 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1150.851599] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475064, 'name': CreateVM_Task, 'duration_secs': 0.284516} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.851884] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1150.852508] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.852667] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.853017] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1150.853267] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-946785f8-8398-48a8-80c4-cbf06e7f6309 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.857418] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Waiting for the task: (returnval){ [ 1150.857418] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52076af9-b8bc-9e83-347b-a7c3c9a8504b" [ 1150.857418] env[69227]: _type = "Task" [ 1150.857418] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.864625] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52076af9-b8bc-9e83-347b-a7c3c9a8504b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.272033] env[69227]: DEBUG nova.network.neutron [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Updated VIF entry in instance network info cache for port 4d016659-3526-40d9-a96f-eba219acf3a3. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1151.272431] env[69227]: DEBUG nova.network.neutron [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Updating instance_info_cache with network_info: [{"id": "4d016659-3526-40d9-a96f-eba219acf3a3", "address": "fa:16:3e:70:32:dc", "network": {"id": "3cada74f-8ddb-4455-8921-161a8efc0b60", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-208328684-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b44af828f3fb42d09eda71f30ed171af", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3e51ebca-e0f8-4b77-b155-4ff928eef130", "external-id": "nsx-vlan-transportzone-859", "segmentation_id": 859, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4d016659-35", "ovs_interfaceid": "4d016659-3526-40d9-a96f-eba219acf3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.368063] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.368240] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1151.368458] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1151.775291] env[69227]: DEBUG oslo_concurrency.lockutils [req-e974f40e-9327-4f87-920c-8678056148e8 req-fda9571b-2b99-45e6-88fa-95bf0983f93e service nova] Releasing lock "refresh_cache-859d632d-fb95-4ac6-9219-8768191979a5" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.427447] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1154.426978] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1155.322996] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "859d632d-fb95-4ac6-9219-8768191979a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.426749] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.930063] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.930372] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1158.930372] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1159.434932] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435094] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435229] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435351] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435471] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435591] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435729] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435830] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.435944] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.436073] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1159.436192] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1159.436408] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.436564] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.436684] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1159.436824] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1159.940233] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1159.940233] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1159.940587] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1159.940587] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1159.941770] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed79d3d-ff62-44bc-8e03-28d0927806ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.950313] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f74b2c-0504-4565-b0aa-5583751bc435 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.964611] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa4625f-5055-4ea4-99f2-c450d04215c7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.970996] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a40f69-e2da-4254-828e-f7a537825dd9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.000575] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1160.000716] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1160.000947] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.036742] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037036] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037165] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037294] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037403] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037523] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037638] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037747] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037858] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.037969] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1161.541623] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1162.045743] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f9e2c26e-20e3-4333-8437-53dd42d1a1e6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1162.549083] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1163.052182] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance e5db927a-625f-49c5-8f82-041550fcac67 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1163.555471] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 74c8d4ad-fa82-488d-aad6-b952061ef2c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.058220] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2b465fda-e6e8-473a-b17e-e5de876c171d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1164.561919] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1165.065536] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 50908bce-98db-4f89-b4e2-81e059044088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1165.568529] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1166.071958] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance db33565c-80fa-419c-8f46-bb38e6b7e7ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1166.574867] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5512573f-d965-4cb0-acfd-6ba248e4774f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.078043] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 96086386-f894-4ce9-9fc0-ea710b5cca44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1167.580404] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.083062] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7f82862-e8aa-42f8-8bbe-38f068f0e6ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1168.083354] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1168.083471] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1168.181081] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing inventories for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1168.193567] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating ProviderTree inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1168.193780] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1168.205084] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing 
aggregate associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, aggregates: None {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1168.222578] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing trait associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1168.458116] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8edb745f-1ed7-45da-a736-502612ad83b0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.465587] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4fcff7-649a-49e4-993c-57e2776b6df0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.494462] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a262c540-2986-4087-9a55-05afe91a8b6b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.501012] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49af9f8a-b9ff-479d-9a30-1cf77815c631 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.513436] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1169.016116] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1169.520899] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1169.521226] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.520s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.521381] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69227) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1169.521559] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11243}} [ 1170.024592] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] There are 0 instances to clean {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11252}} [ 1170.024837] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.024975] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances with incomplete migration {{(pid=69227) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11281}} [ 1171.020333] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.020621] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.529104] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.529104] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.529276] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.034853] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Getting list of instances from cluster (obj){ [ 1172.034853] env[69227]: value = "domain-c8" [ 1172.034853] env[69227]: _type = "ClusterComputeResource" [ 1172.034853] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1172.035975] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2af30aef-fadd-4130-b4d9-e09fa5f3bd99 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.052996] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Got total of 10 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1172.053218] env[69227]: DEBUG nova.compute.manager [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 1724aea2-9fe0-4134-adcc-1a8baf512a80 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.053458] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid f77adbc9-4a34-438e-8e0c-ddab0d1f4603 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.053645] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid be8dae7e-b829-455a-b8d3-73fb04c40128 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.053861] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.054049] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 3a8be11c-6a0e-4dbb-97c0-4290a2716487 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.054238] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid b6ffb3bc-196c-4ac2-b506-3fc514653c5e {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.054418] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 9c61d411-b6dd-43c9-a59a-8ff3030e6149 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.054592] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 43397ae2-14e8-495d-bdd9-54a14e6427e9 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.054762] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 12393e1f-9cb4-4d54-b485-ddc70c65ac47 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.054941] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 859d632d-fb95-4ac6-9219-8768191979a5 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1172.055622] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.055622] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.055748] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "be8dae7e-b829-455a-b8d3-73fb04c40128" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.055984] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.056235] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.056469] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.056702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.056927] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.057169] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1172.057400] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "859d632d-fb95-4ac6-9219-8768191979a5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1184.291046] env[69227]: WARNING oslo_vmware.rw_handles [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1184.291046] 
env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1184.291046] env[69227]: ERROR oslo_vmware.rw_handles [ 1184.291765] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1184.293522] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1184.293757] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Copying Virtual Disk [datastore2] vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/9ac6a682-c253-46de-9e4e-d38f08b78f64/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1184.294072] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42b1f18b-5891-4b97-926f-21096715fbe0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.301796] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Waiting for the task: (returnval){ [ 1184.301796] env[69227]: value = "task-3475065" [ 1184.301796] env[69227]: _type = "Task" [ 1184.301796] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.309860] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Task: {'id': task-3475065, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.812310] env[69227]: DEBUG oslo_vmware.exceptions [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1184.812614] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.813209] env[69227]: ERROR nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1184.813209] env[69227]: Faults: ['InvalidArgument'] [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Traceback (most recent call last): [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] yield resources [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self.driver.spawn(context, instance, image_meta, [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self._fetch_image_if_missing(context, vi) [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] image_cache(vi, tmp_image_ds_loc) [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] vm_util.copy_virtual_disk( [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] session._wait_for_task(vmdk_copy_task) [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] return self.wait_for_task(task_ref) [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] return evt.wait() [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] result = hub.switch() [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] return self.greenlet.switch() [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self.f(*self.args, **self.kw) [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] raise exceptions.translate_fault(task_info.error) [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Faults: ['InvalidArgument'] [ 1184.813209] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] [ 1184.814197] env[69227]: INFO nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Terminating instance [ 1184.815031] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.815250] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1184.815485] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-536eea90-dca7-4ab5-b1ee-7e38f0119ca9 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.818532] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1184.818794] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1184.820425] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af69577-6c98-4d6a-91f0-413178541c6e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.823857] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1184.824064] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1184.826490] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b265a59-ec03-490a-9898-93313fcecb6a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.828642] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1184.828854] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60d76f2e-9155-4463-b20d-38b690eaa107 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.832985] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Waiting for the task: (returnval){ [ 1184.832985] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52828dd7-4b77-178a-eb7e-ac21ede6e8d3" [ 1184.832985] env[69227]: _type = "Task" [ 1184.832985] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.843642] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52828dd7-4b77-178a-eb7e-ac21ede6e8d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.903461] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1184.903690] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1184.903870] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Deleting the datastore file [datastore2] 1724aea2-9fe0-4134-adcc-1a8baf512a80 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1184.904151] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0d94b6c9-5a62-4cd3-8f61-4a3b9f089f91 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.910745] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Waiting for the task: (returnval){ [ 1184.910745] env[69227]: value = "task-3475067" [ 1184.910745] env[69227]: _type = "Task" [ 1184.910745] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.918216] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Task: {'id': task-3475067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.342779] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1185.343154] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Creating directory with path [datastore2] vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1185.343289] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d6f5597-6a1d-4875-8bac-e69c643f3125 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.354584] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Created directory with path [datastore2] vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1185.354801] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Fetch image to [datastore2] vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1185.354936] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1185.355636] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28eb65cb-b49c-4563-a795-692bc1c6895d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.361709] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3683e71-7ad1-4381-83b2-65e5d9e20347 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.370352] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de488278-a495-4e11-a627-e2a9773815b3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.401312] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d01488c3-f7d0-41a7-b164-5630a091ec3a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.408674] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c60954b9-3ea6-419f-a31e-4389b4788246 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.418617] env[69227]: DEBUG oslo_vmware.api [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Task: {'id': task-3475067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071321} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.418844] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1185.419031] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1185.419204] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1185.419377] env[69227]: INFO nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Took 0.60 seconds to destroy the instance on the hypervisor. 
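Note on the records above: the spawn of instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 failed when CopyVirtualDisk_Task raised VimFaultException ("A specified parameter was not correct: fileType", fault InvalidArgument), after which the VM was unregistered and its datastore contents deleted. When triaging a run like this, it can help to pull the per-instance fault names out of the raw log text. The sketch below is an illustrative stand-alone helper, not part of Nova or oslo.vmware; it keys on the "[instance: <uuid>] Faults: ['<name>']" lines that trail the tracebacks, and that line shape is an assumption taken from this log rather than a documented format.

    import re

    # Illustrative helper (not part of Nova): collect the VMware fault names
    # reported per instance from nova-compute log text. It matches the
    # "[instance: <uuid>] Faults: ['<name>']" lines that accompany the
    # spawn-failure tracebacks above; the regex assumes that line shape.
    FAULT_LINE = re.compile(
        r"\[instance: (?P<uuid>[0-9a-f-]{36})\]\s*Faults: \['(?P<fault>[^']+)'\]"
    )

    def faults_by_instance(log_text):
        faults = {}
        for match in FAULT_LINE.finditer(log_text):
            faults.setdefault(match.group("uuid"), set()).add(match.group("fault"))
        return faults

    # Fed this section of the log, the helper would return
    # {'1724aea2-9fe0-4134-adcc-1a8baf512a80': {'InvalidArgument'}}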
[ 1185.421453] env[69227]: DEBUG nova.compute.claims [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1185.421707] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.421986] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.433018] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1185.482850] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1185.541967] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1185.542176] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1186.200174] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea75e9f7-00d9-4712-90bb-61daa6c953ed {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.207678] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ff1ad3-4bbd-411c-906e-774b324f8eae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.239254] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dad7c5f-4a91-4bc3-b27a-64c2fe2f682f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.246315] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a42da4-7d90-4fef-95c1-cd30339f0fa9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.259477] env[69227]: DEBUG nova.compute.provider_tree [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1186.763104] env[69227]: DEBUG nova.scheduler.client.report [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1187.268684] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.846s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.269291] env[69227]: ERROR nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1187.269291] env[69227]: Faults: ['InvalidArgument'] [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Traceback (most recent call last): [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1187.269291] env[69227]: ERROR 
nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self.driver.spawn(context, instance, image_meta, [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self._fetch_image_if_missing(context, vi) [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] image_cache(vi, tmp_image_ds_loc) [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] vm_util.copy_virtual_disk( [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] session._wait_for_task(vmdk_copy_task) [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] return self.wait_for_task(task_ref) [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] return evt.wait() [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] result = hub.switch() [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] return self.greenlet.switch() [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] self.f(*self.args, **self.kw) [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] raise exceptions.translate_fault(task_info.error) [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Faults: ['InvalidArgument'] [ 1187.269291] env[69227]: ERROR nova.compute.manager [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] [ 1187.270138] env[69227]: DEBUG nova.compute.utils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1187.271905] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Build of instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 was re-scheduled: A specified parameter was not correct: fileType [ 1187.271905] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1187.272291] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1187.272462] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1187.272649] env[69227]: DEBUG nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1187.272820] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1188.036457] env[69227]: DEBUG nova.network.neutron [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.539446] env[69227]: INFO nova.compute.manager [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Took 1.27 seconds to deallocate network for instance. [ 1189.571031] env[69227]: INFO nova.scheduler.client.report [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Deleted allocations for instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 [ 1190.081984] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6e7c9013-48e9-46b4-9779-f19edebed2a1 tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 579.255s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.083350] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 382.242s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.083515] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Acquiring lock "1724aea2-9fe0-4134-adcc-1a8baf512a80-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1190.083725] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1190.083895] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1190.085698] env[69227]: INFO nova.compute.manager [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Terminating instance [ 1190.087258] env[69227]: DEBUG nova.compute.manager [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1190.087454] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1190.087706] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41c02faf-413b-40a7-9e83-8db037dbecff {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.096735] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0061fab0-873d-48bc-bea6-7557d0520efd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.125959] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1724aea2-9fe0-4134-adcc-1a8baf512a80 could not be found. [ 1190.126161] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1190.126335] env[69227]: INFO nova.compute.manager [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1190.126561] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
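Annotation: the paired "acquired ... waited" and "released ... held" lines around terminate_instance (and the 579.255s hold time above) are emitted by oslo.concurrency's synchronized wrapper. A hedged sketch of that pattern, not Nova's actual code; the lock name is copied from the log:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('1724aea2-9fe0-4134-adcc-1a8baf512a80-events')
    def _clear_events():
        # runs with the named lock held; lockutils logs how long the caller
        # waited for the lock and how long it was held, as in the lines above
        pass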
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.126774] env[69227]: DEBUG nova.compute.manager [-] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1190.126867] env[69227]: DEBUG nova.network.neutron [-] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1190.586581] env[69227]: DEBUG nova.compute.manager [None req-026fd92e-b7c3-4457-b808-c587ac22e4dc tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: fda81689-7e25-4d08-b0f4-58df21bb2a19] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1190.645183] env[69227]: DEBUG nova.network.neutron [-] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.093930] env[69227]: DEBUG nova.compute.manager [None req-026fd92e-b7c3-4457-b808-c587ac22e4dc tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: fda81689-7e25-4d08-b0f4-58df21bb2a19] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1191.147079] env[69227]: INFO nova.compute.manager [-] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] Took 1.02 seconds to deallocate network for instance. [ 1191.611996] env[69227]: DEBUG oslo_concurrency.lockutils [None req-026fd92e-b7c3-4457-b808-c587ac22e4dc tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "fda81689-7e25-4d08-b0f4-58df21bb2a19" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 232.370s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.114798] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1192.170723] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6232463-6e42-4f93-982a-f70957d493bb tempest-ServerDiagnosticsTest-627758463 tempest-ServerDiagnosticsTest-627758463-project-member] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.087s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.172057] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 20.116s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.172057] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1724aea2-9fe0-4134-adcc-1a8baf512a80] During sync_power_state the instance has a pending task (deleting). Skip. [ 1192.172288] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1724aea2-9fe0-4134-adcc-1a8baf512a80" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.638035] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.638035] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.638729] env[69227]: INFO nova.compute.claims [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1193.921662] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880d5824-aaa8-46e2-8dd3-19f335498985 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.929289] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a91d3dd-d144-47cf-9624-73ffbce92a3d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.960846] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fb5088-0e27-450f-9631-26ee521ef201 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.967965] 
env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7910348-74df-4648-92a4-5fb01c8f8a9b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.980822] env[69227]: DEBUG nova.compute.provider_tree [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1194.484473] env[69227]: DEBUG nova.scheduler.client.report [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1194.989084] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.352s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.989647] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1195.494520] env[69227]: DEBUG nova.compute.utils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1195.495960] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Allocating IP information in the background. 
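Annotation: the inventory data reported above turns into schedulable capacity via the placement rule (total - reserved) * allocation_ratio, with max_unit capping any single request. A small arithmetic sketch using the numbers copied from the log entry:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 93},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        # e.g. VCPU: (48 - 0) * 4.0 = 192 schedulable vCPUs on this node
        print(f"{rc}: capacity={capacity:.0f}, per-request cap={inv['max_unit']}")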
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1195.496110] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1195.570449] env[69227]: DEBUG nova.policy [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6da7fbd7520f410985f286cc73723095', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e0570eff5d5d42b1b041803f2ae43c5e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1196.000181] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1196.080214] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Successfully created port: 17c8935c-c67f-4c80-8da9-23a4db6c4ad8 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1197.010600] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1197.037123] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:23:55Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='de08df5a-fef8-41b7-9bca-6a0fe4427f06',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-1746122516',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1197.037392] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1197.037567] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1197.037761] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1197.037925] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1197.038101] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1197.038332] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1197.038512] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1197.038680] env[69227]: DEBUG 
nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1197.038863] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1197.039071] env[69227]: DEBUG nova.virt.hardware [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1197.040262] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fea01d17-1767-4ea1-8ad0-032168372080 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.048665] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81a5534-3978-42dc-8741-5b13a3f90664 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.565519] env[69227]: DEBUG nova.compute.manager [req-8b2fa4a6-17db-4071-ab7e-d833c8cf3708 req-58312293-c7ed-4f1a-81c6-93f57e2a1cb1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Received event network-vif-plugged-17c8935c-c67f-4c80-8da9-23a4db6c4ad8 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1197.565741] env[69227]: DEBUG oslo_concurrency.lockutils [req-8b2fa4a6-17db-4071-ab7e-d833c8cf3708 req-58312293-c7ed-4f1a-81c6-93f57e2a1cb1 service nova] Acquiring lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.565947] env[69227]: DEBUG oslo_concurrency.lockutils [req-8b2fa4a6-17db-4071-ab7e-d833c8cf3708 req-58312293-c7ed-4f1a-81c6-93f57e2a1cb1 service nova] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.566129] env[69227]: DEBUG oslo_concurrency.lockutils [req-8b2fa4a6-17db-4071-ab7e-d833c8cf3708 req-58312293-c7ed-4f1a-81c6-93f57e2a1cb1 service nova] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.566294] env[69227]: DEBUG nova.compute.manager [req-8b2fa4a6-17db-4071-ab7e-d833c8cf3708 req-58312293-c7ed-4f1a-81c6-93f57e2a1cb1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] No waiting events found dispatching network-vif-plugged-17c8935c-c67f-4c80-8da9-23a4db6c4ad8 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1197.566451] env[69227]: WARNING nova.compute.manager 
[req-8b2fa4a6-17db-4071-ab7e-d833c8cf3708 req-58312293-c7ed-4f1a-81c6-93f57e2a1cb1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Received unexpected event network-vif-plugged-17c8935c-c67f-4c80-8da9-23a4db6c4ad8 for instance with vm_state building and task_state spawning. [ 1197.647677] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Successfully updated port: 17c8935c-c67f-4c80-8da9-23a4db6c4ad8 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1198.152021] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.152021] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.152021] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1198.723992] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1198.855573] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Updating instance_info_cache with network_info: [{"id": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "address": "fa:16:3e:a6:3f:e5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17c8935c-c6", "ovs_interfaceid": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.879281] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.358106] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1199.358503] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance network_info: |[{"id": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "address": "fa:16:3e:a6:3f:e5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap17c8935c-c6", "ovs_interfaceid": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1199.358915] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:3f:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17c8935c-c67f-4c80-8da9-23a4db6c4ad8', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1199.366601] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1199.366804] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1199.367037] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a2c5d4f8-a8ba-4d73-a4e3-d1beac98dfe5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.386748] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1199.386748] env[69227]: value = "task-3475068" [ 1199.386748] env[69227]: _type = "Task" [ 1199.386748] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.397734] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475068, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.613292] env[69227]: DEBUG nova.compute.manager [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Received event network-changed-17c8935c-c67f-4c80-8da9-23a4db6c4ad8 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1199.613605] env[69227]: DEBUG nova.compute.manager [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Refreshing instance network info cache due to event network-changed-17c8935c-c67f-4c80-8da9-23a4db6c4ad8. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1199.613736] env[69227]: DEBUG oslo_concurrency.lockutils [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] Acquiring lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.613829] env[69227]: DEBUG oslo_concurrency.lockutils [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] Acquired lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.613986] env[69227]: DEBUG nova.network.neutron [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Refreshing network info cache for port 17c8935c-c67f-4c80-8da9-23a4db6c4ad8 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1199.895605] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475068, 'name': CreateVM_Task, 'duration_secs': 0.297836} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.895778] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1199.896447] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.896611] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.896924] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1199.897175] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1d349f4-b44a-4374-98ab-e113a90fff04 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.901135] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 1199.901135] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]525862f6-15fc-fae2-3f78-05fc1fb79c33" [ 1199.901135] env[69227]: _type = "Task" [ 1199.901135] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.908016] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]525862f6-15fc-fae2-3f78-05fc1fb79c33, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.304789] env[69227]: DEBUG nova.network.neutron [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Updated VIF entry in instance network info cache for port 17c8935c-c67f-4c80-8da9-23a4db6c4ad8. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1200.305157] env[69227]: DEBUG nova.network.neutron [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Updating instance_info_cache with network_info: [{"id": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "address": "fa:16:3e:a6:3f:e5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17c8935c-c6", "ovs_interfaceid": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.410767] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.411078] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1200.411245] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.807974] env[69227]: DEBUG oslo_concurrency.lockutils [req-181a9fe8-0b24-4dfb-b468-290d6e69c56c req-91f29d9e-5316-4a10-9708-76385ff8aec1 service nova] Releasing lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.153972] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.154322] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.956154] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1215.427194] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1216.137447] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "6085a4f8-f595-417c-9d33-22376a687be6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.137685] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "6085a4f8-f595-417c-9d33-22376a687be6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.438191] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "891a992b-5cbb-404e-8225-3ada55327def" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.439255] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 
tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "891a992b-5cbb-404e-8225-3ada55327def" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.216068] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.216635] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.423375] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.426979] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.427148] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1220.427268] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1220.932727] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.932727] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.932727] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.932727] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.932727] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.933144] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.933144] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.933144] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.933311] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.933367] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1220.933512] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
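Annotation: the "Running periodic task ComputeManager._..." lines come from oslo.service's periodic task machinery. A hedged sketch of the mechanism, with an illustrative spacing value:

    from oslo_config import cfg
    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self):
            super().__init__(cfg.CONF)

        @periodic_task.periodic_task(spacing=60)  # spacing here is illustrative
        def _heal_instance_info_cache(self, context):
            # refresh instance network info caches, skipping instances that
            # are still Building, as the entries above show
            pass

    # A service loop periodically calls manager.run_periodic_tasks(context),
    # which produces the "Running periodic task ..." DEBUG lines in this log.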
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1220.933768] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.437036] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1221.437284] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1221.437284] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.437425] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1221.438454] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2b59b7-5b66-41df-85d6-95b862149893 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.446897] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06b3cc8-bffb-4362-9d83-d6a661f18570 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.460227] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458d7823-b4d2-4562-806d-a802b2c1e13b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.466829] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23d8a6b-8cf4-4953-b77e-2e40eda30ab0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.496859] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180944MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1221.497027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1221.497259] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1222.529061] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.529323] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1222.530391] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1223.034615] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2b465fda-e6e8-473a-b17e-e5de876c171d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1223.537957] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1224.041191] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 50908bce-98db-4f89-b4e2-81e059044088 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1224.544953] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1225.048233] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance db33565c-80fa-419c-8f46-bb38e6b7e7ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1225.551526] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5512573f-d965-4cb0-acfd-6ba248e4774f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.055122] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 96086386-f894-4ce9-9fc0-ea710b5cca44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1226.558822] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1227.062666] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance a7f82862-e8aa-42f8-8bbe-38f068f0e6ca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1227.564722] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1228.067582] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1228.571532] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.074620] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1229.076452] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1229.076452] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1229.399939] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed30f7df-65af-4c68-8417-9b90292ad861 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.408144] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d69d7cc-1928-4382-aa84-b34d14022fa4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.438817] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956de9ce-aebd-44c5-a3b9-e5d7987ac512 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.446424] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7abfe27-6181-4f19-b2fe-498704304c70 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.461126] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1229.964718] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1230.469029] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1230.469282] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.972s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.962693] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.962925] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.963162] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.963461] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1230.963618] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1230.994659] env[69227]: WARNING oslo_vmware.rw_handles [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1230.994659] env[69227]: ERROR oslo_vmware.rw_handles [ 1230.995432] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1230.997209] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1230.997483] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Copying Virtual Disk [datastore2] vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/1b91d2b9-45b4-4b18-8449-bea75f494bc7/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1230.997778] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-780c6111-8bda-480d-9e9b-1885046c8651 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.006433] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Waiting for the 
task: (returnval){ [ 1231.006433] env[69227]: value = "task-3475069" [ 1231.006433] env[69227]: _type = "Task" [ 1231.006433] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.014302] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Task: {'id': task-3475069, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.519667] env[69227]: DEBUG oslo_vmware.exceptions [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1231.519667] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1231.519667] env[69227]: ERROR nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1231.519667] env[69227]: Faults: ['InvalidArgument'] [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Traceback (most recent call last): [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] yield resources [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self.driver.spawn(context, instance, image_meta, [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self._fetch_image_if_missing(context, vi) [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: 
f77adbc9-4a34-438e-8e0c-ddab0d1f4603] image_cache(vi, tmp_image_ds_loc) [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] vm_util.copy_virtual_disk( [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] session._wait_for_task(vmdk_copy_task) [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] return self.wait_for_task(task_ref) [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] return evt.wait() [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] result = hub.switch() [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] return self.greenlet.switch() [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self.f(*self.args, **self.kw) [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] raise exceptions.translate_fault(task_info.error) [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Faults: ['InvalidArgument'] [ 1231.519667] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] [ 1231.519667] env[69227]: INFO nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Terminating instance [ 1231.521333] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd 
tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.521333] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1231.521535] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bdd7e1e5-63af-4bbc-8735-a2d6a5167e27 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.525145] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1231.525145] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1231.525995] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb40c445-02c0-4252-8bb5-295efc183835 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.533124] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1231.534221] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2e19b24-abfa-4a3e-96b1-1fd8db354177 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.536159] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1231.536159] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1231.536563] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4889b746-a279-4d3c-9f43-a13172b380e4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.541763] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Waiting for the task: (returnval){ [ 1231.541763] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52e3b4c9-d12e-006f-a704-80f34c36abed" [ 1231.541763] env[69227]: _type = "Task" [ 1231.541763] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.549512] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52e3b4c9-d12e-006f-a704-80f34c36abed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.605141] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1231.605381] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1231.605561] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Deleting the datastore file [datastore2] f77adbc9-4a34-438e-8e0c-ddab0d1f4603 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1231.605861] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6bc79df-0472-465e-b9d5-92738a3759b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.611940] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Waiting for the task: (returnval){ [ 1231.611940] env[69227]: value = "task-3475071" [ 1231.611940] env[69227]: _type = "Task" [ 1231.611940] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.619987] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Task: {'id': task-3475071, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.052093] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1232.052457] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Creating directory with path [datastore2] vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.052644] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4aed4fd8-5d79-4d75-aa01-c8f2a904ab9d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.065930] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Created directory with path [datastore2] vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.066165] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Fetch image to [datastore2] vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1232.066405] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1232.067101] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b97de3e5-6457-4214-9a61-2b77ffe0b516 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.073925] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79249a4c-647e-4936-9fd5-acba6c6a6fea {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.082812] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d064dca-db13-41a4-8399-702d3794ecb1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.117222] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9b72d8-79e3-425d-8e27-836a208a26c8 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.125756] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4e89b2f1-91d8-4d67-a14e-a31f87d208e0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.127423] env[69227]: DEBUG oslo_vmware.api [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Task: {'id': task-3475071, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07785} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.127650] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1232.127868] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1232.127991] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1232.128195] env[69227]: INFO nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Took 0.60 seconds to destroy the instance on the hypervisor. 
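[Illustrative sketch, not part of the captured log] The sequence above (the image write handle closed with http.client.RemoteDisconnected, then CopyVirtualDisk_Task on the cached tmp-sparse.vmdk failed with InvalidArgument on fileType, and the instance was destroyed) recurs for other instances later in this trace. Below is a minimal Python sketch for pulling such spawn failures out of a nova-compute log in exactly this line format; the script, regexes and function names are my own assumptions, not tooling referenced by the log.

import re
import sys
from collections import defaultdict

# Assumed line shapes, copied from this trace:
#   ... ERROR nova.compute.manager [None req-...] [instance: <uuid>] Instance failed to spawn: <exception>: <message>
#   ... ERROR nova.compute.manager [instance: <uuid>] Faults: ['InvalidArgument']
SPAWN_FAILURE = re.compile(
    r"ERROR nova\.compute\.manager .*\[instance: (?P<uuid>[0-9a-f-]{36})\] "
    r"Instance failed to spawn: (?P<exc>\S+): (?P<msg>.+)$")
FAULTS = re.compile(
    r"ERROR nova\.compute\.manager \[instance: (?P<uuid>[0-9a-f-]{36})\] "
    r"Faults: (?P<faults>\[.*\])")

def spawn_failures(path):
    # Map instance uuid -> (exception class, message, fault lists seen).
    failures = {}
    faults = defaultdict(set)
    with open(path, errors="replace") as fh:
        for line in fh:
            m = SPAWN_FAILURE.search(line)
            if m:
                failures[m["uuid"]] = (m["exc"], m["msg"].strip())
            m = FAULTS.search(line)
            if m:
                faults[m["uuid"]].add(m["faults"])
    return {u: (exc, msg, sorted(faults[u])) for u, (exc, msg) in failures.items()}

if __name__ == "__main__":
    # Usage: python spawn_failures.py nova-compute.log
    for uuid, (exc, msg, flts) in spawn_failures(sys.argv[1]).items():
        print(f"{uuid}: {exc}: {msg} {flts}")

On a log like this one (one record per line) it would report f77adbc9-4a34-438e-8e0c-ddab0d1f4603 with oslo_vmware.exceptions.VimFaultException and ['InvalidArgument'].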
[ 1232.130243] env[69227]: DEBUG nova.compute.claims [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1232.130419] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.130647] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.148698] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1232.204645] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1232.266905] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1232.267130] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1232.912972] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c37d9cf6-eaa1-4664-9107-a8437da992d1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.920630] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94bcae5-845c-4d23-b628-61dba737631e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.951222] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda61b90-6694-4e0f-a29c-68b5cc02a453 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.958724] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8aed679-94f4-451e-8f9f-033ac50c03ea {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.972691] env[69227]: DEBUG nova.compute.provider_tree [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.475701] env[69227]: DEBUG nova.scheduler.client.report [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1233.981358] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.850s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.981980] env[69227]: ERROR nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1233.981980] env[69227]: Faults: ['InvalidArgument'] [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Traceback (most recent call last): [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1233.981980] env[69227]: 
ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self.driver.spawn(context, instance, image_meta, [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self._fetch_image_if_missing(context, vi) [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] image_cache(vi, tmp_image_ds_loc) [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] vm_util.copy_virtual_disk( [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] session._wait_for_task(vmdk_copy_task) [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] return self.wait_for_task(task_ref) [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] return evt.wait() [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] result = hub.switch() [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] return self.greenlet.switch() [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] self.f(*self.args, **self.kw) [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] raise exceptions.translate_fault(task_info.error) [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Faults: ['InvalidArgument'] [ 1233.981980] env[69227]: ERROR nova.compute.manager [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] [ 1233.982979] env[69227]: DEBUG nova.compute.utils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1233.984537] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Build of instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 was re-scheduled: A specified parameter was not correct: fileType [ 1233.984537] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1233.984911] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1233.985094] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1233.985268] env[69227]: DEBUG nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1233.985427] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1234.822607] env[69227]: DEBUG nova.network.neutron [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.325988] env[69227]: INFO nova.compute.manager [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Took 1.34 seconds to deallocate network for instance. [ 1236.139940] env[69227]: DEBUG oslo_concurrency.lockutils [None req-698f67a2-db11-43b3-9e21-45ca66e008fc tempest-ServerActionsTestOtherB-114701388 tempest-ServerActionsTestOtherB-114701388-project-member] Acquiring lock "ce3124b0-4a17-470a-bdb1-164c53a26f37" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.140234] env[69227]: DEBUG oslo_concurrency.lockutils [None req-698f67a2-db11-43b3-9e21-45ca66e008fc tempest-ServerActionsTestOtherB-114701388 tempest-ServerActionsTestOtherB-114701388-project-member] Lock "ce3124b0-4a17-470a-bdb1-164c53a26f37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.236798] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c8a08a0b-bac7-45ba-b464-c0ced68539e6 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "cccd529c-d780-404d-b222-7c0213363dce" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.238027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c8a08a0b-bac7-45ba-b464-c0ced68539e6 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "cccd529c-d780-404d-b222-7c0213363dce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.355172] env[69227]: INFO nova.scheduler.client.report [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d 
tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Deleted allocations for instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 [ 1236.865859] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c6cb04ab-acdc-40ba-85a1-3176ae20642d tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 624.511s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.867607] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 427.774s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.867607] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Acquiring lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.868082] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.868082] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1236.871523] env[69227]: INFO nova.compute.manager [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Terminating instance [ 1236.873225] env[69227]: DEBUG nova.compute.manager [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1236.873434] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1236.873696] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41a3c5ef-b80b-4c05-b8aa-0e1d3d50cb59 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.883181] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d933d8a6-fe68-41ac-9b08-41b65e96d858 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.911925] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f77adbc9-4a34-438e-8e0c-ddab0d1f4603 could not be found. [ 1236.912147] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1236.912354] env[69227]: INFO nova.compute.manager [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1236.912591] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1236.912812] env[69227]: DEBUG nova.compute.manager [-] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1236.912907] env[69227]: DEBUG nova.network.neutron [-] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1237.370497] env[69227]: DEBUG nova.compute.manager [None req-d72da386-f693-42ae-b05d-8e98f8724974 tempest-ServerRescueTestJSONUnderV235-1135676851 tempest-ServerRescueTestJSONUnderV235-1135676851-project-member] [instance: f9e2c26e-20e3-4333-8437-53dd42d1a1e6] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1237.432624] env[69227]: DEBUG nova.network.neutron [-] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1237.875982] env[69227]: DEBUG nova.compute.manager [None req-d72da386-f693-42ae-b05d-8e98f8724974 tempest-ServerRescueTestJSONUnderV235-1135676851 tempest-ServerRescueTestJSONUnderV235-1135676851-project-member] [instance: f9e2c26e-20e3-4333-8437-53dd42d1a1e6] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1237.935418] env[69227]: INFO nova.compute.manager [-] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] Took 1.02 seconds to deallocate network for instance. [ 1238.236307] env[69227]: DEBUG oslo_concurrency.lockutils [None req-853b5102-efcd-4317-b983-ac7c466cd33b tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "7eaa0907-ab53-4499-a0d3-723a348279b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1238.237224] env[69227]: DEBUG oslo_concurrency.lockutils [None req-853b5102-efcd-4317-b983-ac7c466cd33b tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "7eaa0907-ab53-4499-a0d3-723a348279b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.389512] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d72da386-f693-42ae-b05d-8e98f8724974 tempest-ServerRescueTestJSONUnderV235-1135676851 tempest-ServerRescueTestJSONUnderV235-1135676851-project-member] Lock "f9e2c26e-20e3-4333-8437-53dd42d1a1e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 229.586s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.893631] env[69227]: DEBUG nova.compute.manager [None req-d5f4ba83-1cf5-4ea3-b368-520fae6836b8 tempest-ImagesNegativeTestJSON-434565976 tempest-ImagesNegativeTestJSON-434565976-project-member] [instance: 3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1238.959769] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8c48fc5b-3ba7-48d2-9410-cf8eb64d11d1 tempest-ServerAddressesTestJSON-335475544 tempest-ServerAddressesTestJSON-335475544-project-member] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 2.093s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.960787] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 66.905s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1238.960990] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f77adbc9-4a34-438e-8e0c-ddab0d1f4603] During sync_power_state the instance has a pending task (deleting). Skip. [ 1238.961188] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "f77adbc9-4a34-438e-8e0c-ddab0d1f4603" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.399197] env[69227]: DEBUG nova.compute.manager [None req-d5f4ba83-1cf5-4ea3-b368-520fae6836b8 tempest-ImagesNegativeTestJSON-434565976 tempest-ImagesNegativeTestJSON-434565976-project-member] [instance: 3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1239.914151] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d5f4ba83-1cf5-4ea3-b368-520fae6836b8 tempest-ImagesNegativeTestJSON-434565976 tempest-ImagesNegativeTestJSON-434565976-project-member] Lock "3f23fae3-9cc8-454d-b2fb-fe4ab87d23ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 230.341s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.417222] env[69227]: DEBUG nova.compute.manager [None req-6f9b95ab-2c0b-4a62-a7aa-f37218642acb tempest-ServersTestManualDisk-770401789 tempest-ServersTestManualDisk-770401789-project-member] [instance: e5db927a-625f-49c5-8f82-041550fcac67] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1240.921679] env[69227]: DEBUG nova.compute.manager [None req-6f9b95ab-2c0b-4a62-a7aa-f37218642acb tempest-ServersTestManualDisk-770401789 tempest-ServersTestManualDisk-770401789-project-member] [instance: e5db927a-625f-49c5-8f82-041550fcac67] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1241.435300] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6f9b95ab-2c0b-4a62-a7aa-f37218642acb tempest-ServersTestManualDisk-770401789 tempest-ServersTestManualDisk-770401789-project-member] Lock "e5db927a-625f-49c5-8f82-041550fcac67" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.427s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1241.585509] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e7eb5cb2-d0fa-4fcf-b29a-0dc5cee67310 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "341cf5ae-4ae9-4dd3-a6a8-19eb95189221" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1241.585804] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e7eb5cb2-d0fa-4fcf-b29a-0dc5cee67310 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "341cf5ae-4ae9-4dd3-a6a8-19eb95189221" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.937698] env[69227]: DEBUG nova.compute.manager [None req-4ff2042e-5db6-4fa6-b980-d2cf2dbbea66 tempest-ServerMetadataNegativeTestJSON-1039436067 tempest-ServerMetadataNegativeTestJSON-1039436067-project-member] [instance: 74c8d4ad-fa82-488d-aad6-b952061ef2c6] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1242.596479] env[69227]: DEBUG nova.compute.manager [None req-4ff2042e-5db6-4fa6-b980-d2cf2dbbea66 tempest-ServerMetadataNegativeTestJSON-1039436067 tempest-ServerMetadataNegativeTestJSON-1039436067-project-member] [instance: 74c8d4ad-fa82-488d-aad6-b952061ef2c6] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1243.112027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-4ff2042e-5db6-4fa6-b980-d2cf2dbbea66 tempest-ServerMetadataNegativeTestJSON-1039436067 tempest-ServerMetadataNegativeTestJSON-1039436067-project-member] Lock "74c8d4ad-fa82-488d-aad6-b952061ef2c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.452s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.614182] env[69227]: DEBUG nova.compute.manager [None req-aab74667-45be-432b-a455-efd965708b91 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] [instance: 2b465fda-e6e8-473a-b17e-e5de876c171d] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1244.118319] env[69227]: DEBUG nova.compute.manager [None req-aab74667-45be-432b-a455-efd965708b91 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] [instance: 2b465fda-e6e8-473a-b17e-e5de876c171d] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1244.631281] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aab74667-45be-432b-a455-efd965708b91 tempest-AttachVolumeNegativeTest-2021245725 tempest-AttachVolumeNegativeTest-2021245725-project-member] Lock "2b465fda-e6e8-473a-b17e-e5de876c171d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.602s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.134031] env[69227]: DEBUG nova.compute.manager [None req-448222d2-301e-422a-b0e4-18a95c29d597 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] [instance: 094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1245.638844] env[69227]: DEBUG nova.compute.manager [None req-448222d2-301e-422a-b0e4-18a95c29d597 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] [instance: 094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1246.153079] env[69227]: DEBUG oslo_concurrency.lockutils [None req-448222d2-301e-422a-b0e4-18a95c29d597 tempest-AttachInterfacesTestJSON-1604384750 tempest-AttachInterfacesTestJSON-1604384750-project-member] Lock "094aa5e5-f2ce-4ad1-8dbe-bcfe3f0c93d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.808s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.655589] env[69227]: DEBUG nova.compute.manager [None req-c4b9d06b-d6a8-43b5-be52-a161f3dcefb0 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 50908bce-98db-4f89-b4e2-81e059044088] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1247.161541] env[69227]: DEBUG nova.compute.manager [None req-c4b9d06b-d6a8-43b5-be52-a161f3dcefb0 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 50908bce-98db-4f89-b4e2-81e059044088] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1247.678654] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4b9d06b-d6a8-43b5-be52-a161f3dcefb0 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "50908bce-98db-4f89-b4e2-81e059044088" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 208.870s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.181016] env[69227]: DEBUG nova.compute.manager [None req-2e16a568-a258-40f6-927d-c9e6dcdd4fd2 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1248.685457] env[69227]: DEBUG nova.compute.manager [None req-2e16a568-a258-40f6-927d-c9e6dcdd4fd2 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1249.201959] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2e16a568-a258-40f6-927d-c9e6dcdd4fd2 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "2716e5e0-0bfe-4a8e-9b4f-dcdbb03cd9ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.960s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1249.705490] env[69227]: DEBUG nova.compute.manager [None req-8100984d-9fd7-4cf8-ac39-6c45a3e9cb2c tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: db33565c-80fa-419c-8f46-bb38e6b7e7ed] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1250.209819] env[69227]: DEBUG nova.compute.manager [None req-8100984d-9fd7-4cf8-ac39-6c45a3e9cb2c tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: db33565c-80fa-419c-8f46-bb38e6b7e7ed] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1250.722147] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8100984d-9fd7-4cf8-ac39-6c45a3e9cb2c tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "db33565c-80fa-419c-8f46-bb38e6b7e7ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 206.062s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.225185] env[69227]: DEBUG nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1251.750766] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.751125] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.752665] env[69227]: INFO nova.compute.claims [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1252.400484] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "5512573f-d965-4cb0-acfd-6ba248e4774f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1252.999911] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fadb9922-51a6-4ad2-b923-39c09014dd01 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.007579] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900c39e7-35f8-49d5-81b8-d3848ba36b1a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.036512] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ff40ce-e2fe-4446-ae27-31957e16821d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.043047] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ef4e59-a9d9-4a48-bf89-fbb5559c89f2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.055513] env[69227]: DEBUG nova.compute.provider_tree [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1253.558820] env[69227]: DEBUG nova.scheduler.client.report [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1254.063929] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.313s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1254.064488] env[69227]: DEBUG nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1254.568354] env[69227]: DEBUG nova.compute.claims [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1254.568538] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.568794] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.309299] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c0e5b6-f071-4bea-93e1-14a3a6e9be99 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.317084] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd81e4d-5245-4aee-a576-89f2d57501e3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.347420] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc8c7878-6e00-4479-bfba-005142b23538 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.355182] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d154a08-169b-4343-89bc-870741e8a691 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.368390] env[69227]: DEBUG nova.compute.provider_tree [None req-33d4111d-871f-48f6-bd54-151124f61bd7 
tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.871926] env[69227]: DEBUG nova.scheduler.client.report [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1256.377194] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.808s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.377966] env[69227]: DEBUG nova.compute.utils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Conflict updating instance 5512573f-d965-4cb0-acfd-6ba248e4774f. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1256.379837] env[69227]: DEBUG nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance disappeared during build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 1256.380043] env[69227]: DEBUG nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1256.380303] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "refresh_cache-5512573f-d965-4cb0-acfd-6ba248e4774f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1256.380574] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "refresh_cache-5512573f-d965-4cb0-acfd-6ba248e4774f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1256.380691] env[69227]: DEBUG nova.network.neutron [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1256.976967] env[69227]: DEBUG nova.network.neutron [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1257.044828] env[69227]: DEBUG nova.network.neutron [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1257.548587] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "refresh_cache-5512573f-d965-4cb0-acfd-6ba248e4774f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1257.548872] env[69227]: DEBUG nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1257.549892] env[69227]: DEBUG nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1257.549892] env[69227]: DEBUG nova.network.neutron [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1257.563018] env[69227]: DEBUG nova.network.neutron [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1258.066735] env[69227]: DEBUG nova.network.neutron [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.572186] env[69227]: INFO nova.compute.manager [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Took 1.02 seconds to deallocate network for instance. 
[ 1259.604242] env[69227]: INFO nova.scheduler.client.report [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleted allocations for instance 5512573f-d965-4cb0-acfd-6ba248e4774f [ 1259.604482] env[69227]: DEBUG oslo_concurrency.lockutils [None req-33d4111d-871f-48f6-bd54-151124f61bd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "5512573f-d965-4cb0-acfd-6ba248e4774f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.576s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.605750] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "5512573f-d965-4cb0-acfd-6ba248e4774f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 7.205s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.605876] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "5512573f-d965-4cb0-acfd-6ba248e4774f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1259.606097] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "5512573f-d965-4cb0-acfd-6ba248e4774f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1259.606268] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "5512573f-d965-4cb0-acfd-6ba248e4774f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1259.608083] env[69227]: INFO nova.compute.manager [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Terminating instance [ 1259.609513] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "refresh_cache-5512573f-d965-4cb0-acfd-6ba248e4774f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1259.609664] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "refresh_cache-5512573f-d965-4cb0-acfd-6ba248e4774f" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1259.609831] env[69227]: DEBUG nova.network.neutron [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1260.110824] env[69227]: DEBUG nova.compute.manager [None req-c865cf00-48bf-41d7-9c55-7c46e7991be5 tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] [instance: 96086386-f894-4ce9-9fc0-ea710b5cca44] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1260.134211] env[69227]: DEBUG nova.network.neutron [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1260.260100] env[69227]: DEBUG nova.network.neutron [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1260.612715] env[69227]: DEBUG nova.compute.manager [None req-c865cf00-48bf-41d7-9c55-7c46e7991be5 tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] [instance: 96086386-f894-4ce9-9fc0-ea710b5cca44] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1260.762676] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "refresh_cache-5512573f-d965-4cb0-acfd-6ba248e4774f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1260.763109] env[69227]: DEBUG nova.compute.manager [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1260.763311] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1260.763644] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a8fd00a4-b055-439a-bb8a-95e1cffd44b1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.773330] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae8386e-a648-451c-9ac9-47d0062c15cb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.801460] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5512573f-d965-4cb0-acfd-6ba248e4774f could not be found. [ 1260.801665] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1260.801853] env[69227]: INFO nova.compute.manager [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1260.802177] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1260.802412] env[69227]: DEBUG nova.compute.manager [-] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1260.802530] env[69227]: DEBUG nova.network.neutron [-] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1260.820067] env[69227]: DEBUG nova.network.neutron [-] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1261.128892] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c865cf00-48bf-41d7-9c55-7c46e7991be5 tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] Lock "96086386-f894-4ce9-9fc0-ea710b5cca44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.813s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.321590] env[69227]: DEBUG nova.network.neutron [-] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.632299] env[69227]: DEBUG nova.compute.manager [None req-3b43390a-42a4-4311-ab7a-3304027a9b1a tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] [instance: aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1261.825149] env[69227]: INFO nova.compute.manager [-] [instance: 5512573f-d965-4cb0-acfd-6ba248e4774f] Took 1.02 seconds to deallocate network for instance. [ 1262.136898] env[69227]: DEBUG nova.compute.manager [None req-3b43390a-42a4-4311-ab7a-3304027a9b1a tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] [instance: aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1262.340602] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.340965] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.362761] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "c2ccfca6-9703-429d-ba58-ee7b155edfce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.362970] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "c2ccfca6-9703-429d-ba58-ee7b155edfce" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.388468] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "cc2e221c-f064-4aea-8316-c2d2e01d0597" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.388710] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc2e221c-f064-4aea-8316-c2d2e01d0597" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.649503] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3b43390a-42a4-4311-ab7a-3304027a9b1a tempest-ServerRescueNegativeTestJSON-933979334 tempest-ServerRescueNegativeTestJSON-933979334-project-member] Lock "aed2e226-4eb6-498a-b3b9-8de6e0a6b9a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.787s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.858924] env[69227]: DEBUG oslo_concurrency.lockutils [None req-513f6baf-b31d-4434-a09f-e35c4324bb7e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "5512573f-d965-4cb0-acfd-6ba248e4774f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.253s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.152625] env[69227]: DEBUG nova.compute.manager [None req-aa7e9190-c269-420a-b8e2-49917857a4a4 tempest-InstanceActionsTestJSON-1348339765 tempest-InstanceActionsTestJSON-1348339765-project-member] [instance: a7f82862-e8aa-42f8-8bbe-38f068f0e6ca] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1263.656944] env[69227]: DEBUG nova.compute.manager [None req-aa7e9190-c269-420a-b8e2-49917857a4a4 tempest-InstanceActionsTestJSON-1348339765 tempest-InstanceActionsTestJSON-1348339765-project-member] [instance: a7f82862-e8aa-42f8-8bbe-38f068f0e6ca] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1263.885879] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0cce11cb-6327-444b-9d3c-5483ecc1cb4e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "d61f136b-edb3-4971-a8f5-c96dd86bd2f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.885879] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0cce11cb-6327-444b-9d3c-5483ecc1cb4e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "d61f136b-edb3-4971-a8f5-c96dd86bd2f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.169921] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa7e9190-c269-420a-b8e2-49917857a4a4 tempest-InstanceActionsTestJSON-1348339765 tempest-InstanceActionsTestJSON-1348339765-project-member] Lock "a7f82862-e8aa-42f8-8bbe-38f068f0e6ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.268s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.672762] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1265.194191] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1265.194468] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.195908] env[69227]: INFO nova.compute.claims [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1266.434061] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0500d9-2d9f-4079-9461-d02b89b97043 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.441779] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197b7fb7-ed4f-4bbf-b495-37ea4b8f5226 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.470564] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056f7d58-7173-4162-b3c9-279076ef5974 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.477882] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8981f1-b337-46be-9329-c2122d8781be {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.493032] env[69227]: DEBUG nova.compute.provider_tree [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1266.995891] env[69227]: DEBUG nova.scheduler.client.report [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1267.501063] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.306s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.501354] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1268.006174] env[69227]: DEBUG nova.compute.utils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1268.007421] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1268.007597] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1268.059919] env[69227]: DEBUG nova.policy [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2b868d09c1ca46a9a90e5c62dc1936ec', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7fb2e3c96ee642cdb8f849b0277bc444', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1268.424640] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Successfully created port: a122bfbe-5250-4879-ace2-9d9fe7fb9308 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1268.511788] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1269.521832] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1269.546771] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1269.547015] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1269.547182] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1269.547363] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1269.547511] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1269.547660] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1269.547868] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1269.548034] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1269.548203] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1269.548362] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1269.548530] env[69227]: DEBUG nova.virt.hardware [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1269.549391] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7039977d-3c7c-4677-97a8-53e332055c29 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.557171] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29aff224-38f8-4243-bc57-941d31db4e96 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.899769] env[69227]: DEBUG nova.compute.manager [req-b26e5368-76ae-4c6c-9260-3b206599b310 req-ee2ef234-d715-4cfc-8768-e06d2b638bd4 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Received event network-vif-plugged-a122bfbe-5250-4879-ace2-9d9fe7fb9308 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1269.900083] env[69227]: DEBUG oslo_concurrency.lockutils [req-b26e5368-76ae-4c6c-9260-3b206599b310 req-ee2ef234-d715-4cfc-8768-e06d2b638bd4 service nova] Acquiring lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.900264] env[69227]: DEBUG oslo_concurrency.lockutils [req-b26e5368-76ae-4c6c-9260-3b206599b310 req-ee2ef234-d715-4cfc-8768-e06d2b638bd4 service nova] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1269.900380] env[69227]: DEBUG oslo_concurrency.lockutils [req-b26e5368-76ae-4c6c-9260-3b206599b310 req-ee2ef234-d715-4cfc-8768-e06d2b638bd4 service nova] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.900575] env[69227]: DEBUG nova.compute.manager [req-b26e5368-76ae-4c6c-9260-3b206599b310 req-ee2ef234-d715-4cfc-8768-e06d2b638bd4 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] No waiting events found dispatching network-vif-plugged-a122bfbe-5250-4879-ace2-9d9fe7fb9308 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1269.900790] env[69227]: WARNING nova.compute.manager [req-b26e5368-76ae-4c6c-9260-3b206599b310 req-ee2ef234-d715-4cfc-8768-e06d2b638bd4 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Received unexpected event network-vif-plugged-a122bfbe-5250-4879-ace2-9d9fe7fb9308 for instance with vm_state building and task_state spawning. [ 1269.980832] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Successfully updated port: a122bfbe-5250-4879-ace2-9d9fe7fb9308 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1270.484310] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "refresh_cache-d39f7ea0-82f7-490b-94cf-1c3c19806c7f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.484472] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquired lock "refresh_cache-d39f7ea0-82f7-490b-94cf-1c3c19806c7f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.484625] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1271.015346] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1271.180654] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Updating instance_info_cache with network_info: [{"id": "a122bfbe-5250-4879-ace2-9d9fe7fb9308", "address": "fa:16:3e:04:da:68", "network": {"id": "160fae4f-50a9-40b7-8fcf-c6e5dc713c4d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-389593410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fb2e3c96ee642cdb8f849b0277bc444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa122bfbe-52", "ovs_interfaceid": "a122bfbe-5250-4879-ace2-9d9fe7fb9308", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1271.684071] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Releasing lock "refresh_cache-d39f7ea0-82f7-490b-94cf-1c3c19806c7f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1271.684299] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance network_info: |[{"id": "a122bfbe-5250-4879-ace2-9d9fe7fb9308", "address": "fa:16:3e:04:da:68", "network": {"id": "160fae4f-50a9-40b7-8fcf-c6e5dc713c4d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-389593410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fb2e3c96ee642cdb8f849b0277bc444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa122bfbe-52", "ovs_interfaceid": "a122bfbe-5250-4879-ace2-9d9fe7fb9308", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1271.684736] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:da:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '975b168a-03e5-449d-95ac-4d51ba027242', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a122bfbe-5250-4879-ace2-9d9fe7fb9308', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1271.692354] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Creating folder: Project (7fb2e3c96ee642cdb8f849b0277bc444). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1271.692663] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af6dd8c0-7fb0-46c0-b173-a54f28aa6182 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.704271] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Created folder: Project (7fb2e3c96ee642cdb8f849b0277bc444) in parent group-v694623. [ 1271.704448] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Creating folder: Instances. Parent ref: group-v694687. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1271.704659] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fda3c78-ce83-4a9a-976d-829650789f9f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.712856] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Created folder: Instances in parent group-v694687. [ 1271.713082] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1271.713258] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1271.713440] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-105ecd8b-d272-43b1-a437-552b5206845e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.731260] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1271.731260] env[69227]: value = "task-3475074" [ 1271.731260] env[69227]: _type = "Task" [ 1271.731260] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.738287] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475074, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.925960] env[69227]: DEBUG nova.compute.manager [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Received event network-changed-a122bfbe-5250-4879-ace2-9d9fe7fb9308 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1271.926181] env[69227]: DEBUG nova.compute.manager [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Refreshing instance network info cache due to event network-changed-a122bfbe-5250-4879-ace2-9d9fe7fb9308. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1271.926463] env[69227]: DEBUG oslo_concurrency.lockutils [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] Acquiring lock "refresh_cache-d39f7ea0-82f7-490b-94cf-1c3c19806c7f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1271.926656] env[69227]: DEBUG oslo_concurrency.lockutils [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] Acquired lock "refresh_cache-d39f7ea0-82f7-490b-94cf-1c3c19806c7f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1271.926914] env[69227]: DEBUG nova.network.neutron [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Refreshing network info cache for port a122bfbe-5250-4879-ace2-9d9fe7fb9308 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1272.241862] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475074, 'name': CreateVM_Task, 'duration_secs': 0.292512} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1272.242158] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1272.248297] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1272.248424] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1272.248733] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1272.248973] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fee1b18-39c7-46e0-b4bb-9733aa24a2fa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.253471] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Waiting for the task: (returnval){ [ 1272.253471] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5251dc94-5a2b-7a1b-6d5d-789a3cdffa6c" [ 1272.253471] env[69227]: _type = "Task" [ 1272.253471] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.260669] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5251dc94-5a2b-7a1b-6d5d-789a3cdffa6c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1272.661242] env[69227]: DEBUG nova.network.neutron [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Updated VIF entry in instance network info cache for port a122bfbe-5250-4879-ace2-9d9fe7fb9308. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1272.661594] env[69227]: DEBUG nova.network.neutron [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Updating instance_info_cache with network_info: [{"id": "a122bfbe-5250-4879-ace2-9d9fe7fb9308", "address": "fa:16:3e:04:da:68", "network": {"id": "160fae4f-50a9-40b7-8fcf-c6e5dc713c4d", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-389593410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7fb2e3c96ee642cdb8f849b0277bc444", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "975b168a-03e5-449d-95ac-4d51ba027242", "external-id": "nsx-vlan-transportzone-365", "segmentation_id": 365, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa122bfbe-52", "ovs_interfaceid": "a122bfbe-5250-4879-ace2-9d9fe7fb9308", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.763894] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.764142] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1272.764354] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.165058] env[69227]: DEBUG oslo_concurrency.lockutils [req-19ea895c-02b5-40b7-ae7d-8f4bec4ebb7a req-d1e2d645-e579-4bdd-9f86-d504e9873ca7 service nova] Releasing lock "refresh_cache-d39f7ea0-82f7-490b-94cf-1c3c19806c7f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.427889] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1277.427076] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1279.890041] env[69227]: WARNING oslo_vmware.rw_handles [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1279.890041] env[69227]: ERROR oslo_vmware.rw_handles [ 1279.890041] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1279.892013] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1279.892013] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Copying Virtual Disk [datastore2] vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/29d9b085-827c-471f-a9a4-baf5bbeabbc3/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1279.892178] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8d152eb0-aec5-424a-acbb-ec1f880438bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.899922] env[69227]: DEBUG 
oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Waiting for the task: (returnval){ [ 1279.899922] env[69227]: value = "task-3475075" [ 1279.899922] env[69227]: _type = "Task" [ 1279.899922] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.907603] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Task: {'id': task-3475075, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.410120] env[69227]: DEBUG oslo_vmware.exceptions [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1280.410405] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1280.411012] env[69227]: ERROR nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1280.411012] env[69227]: Faults: ['InvalidArgument'] [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Traceback (most recent call last): [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] yield resources [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self.driver.spawn(context, instance, image_meta, [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self._fetch_image_if_missing(context, vi) [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] image_cache(vi, tmp_image_ds_loc) [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] vm_util.copy_virtual_disk( [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] session._wait_for_task(vmdk_copy_task) [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] return self.wait_for_task(task_ref) [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] return evt.wait() [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] result = hub.switch() [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] return self.greenlet.switch() [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self.f(*self.args, **self.kw) [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] raise exceptions.translate_fault(task_info.error) [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Faults: ['InvalidArgument'] [ 1280.411012] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] [ 1280.412585] env[69227]: INFO nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] 
Terminating instance [ 1280.412825] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.413894] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.414533] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1280.414724] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1280.414978] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-07ecb1c5-bb59-4020-9e33-bf0ee2384866 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.417202] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e407f5ff-02e5-4549-8d82-a94803bb96e6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.423677] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1280.423867] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d698f7a1-fcc0-4a73-9b55-1eb248f748cf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.425925] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.426584] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1280.427078] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e724c3ef-31bd-4b61-84fa-ea2dfa559944 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.431485] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 1280.431485] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52756ed7-7fb4-9213-0c12-f975ea797697" [ 1280.431485] env[69227]: _type = "Task" [ 1280.431485] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.438341] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52756ed7-7fb4-9213-0c12-f975ea797697, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.489512] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1280.489737] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1280.489920] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Deleting the datastore file [datastore2] be8dae7e-b829-455a-b8d3-73fb04c40128 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1280.490210] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3fd1383c-b5e1-45a3-8939-f3c19bed6ba9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.495741] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Waiting for the task: (returnval){ [ 1280.495741] env[69227]: value = "task-3475077" [ 1280.495741] env[69227]: _type = "Task" [ 1280.495741] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.504594] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Task: {'id': task-3475077, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.941677] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1280.942042] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating directory with path [datastore2] vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1280.942174] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d77b1b83-fd56-44e5-933b-4c6cdc9d5993 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.953390] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Created directory with path [datastore2] vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1280.953589] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Fetch image to [datastore2] vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1280.953788] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1280.954543] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebe2b91d-3322-4b53-9339-29323ea04fee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.961099] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855e177f-d20e-454d-b504-bbbe9a92923e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.969755] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1671ef-0606-4526-8ca2-77d58ad1ba20 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.002823] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7124f0c9-9f91-494c-a997-8c9a4a1c0473 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.011190] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a7a936c4-54b5-4cc7-bb23-420079bba22c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.013050] env[69227]: DEBUG oslo_vmware.api [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Task: {'id': task-3475077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07771} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1281.013146] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1281.013302] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1281.013472] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1281.013645] env[69227]: INFO nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1281.016104] env[69227]: DEBUG nova.compute.claims [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1281.016281] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1281.016506] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1281.033682] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1281.083158] env[69227]: DEBUG oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1281.142846] env[69227]: DEBUG oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1281.143064] env[69227]: DEBUG oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1281.422496] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.426214] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.426409] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1281.738835] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c30af3-c781-4942-b13f-f8f6118c36b7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.747064] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8956e29-7dc3-445a-8b70-e36eddb4fd5f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.775889] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44129c2-8200-4f10-b1dc-924aebc4f31b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.782470] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c7baef-7b81-4760-9370-0164d902a96d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.794861] env[69227]: DEBUG nova.compute.provider_tree [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1281.929948] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.298467] env[69227]: DEBUG nova.scheduler.client.report [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
1282.803483] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.787s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.804094] env[69227]: ERROR nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.804094] env[69227]: Faults: ['InvalidArgument'] [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Traceback (most recent call last): [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self.driver.spawn(context, instance, image_meta, [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self._fetch_image_if_missing(context, vi) [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] image_cache(vi, tmp_image_ds_loc) [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] vm_util.copy_virtual_disk( [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] session._wait_for_task(vmdk_copy_task) [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] return self.wait_for_task(task_ref) [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1282.804094] env[69227]: ERROR nova.compute.manager 
[instance: be8dae7e-b829-455a-b8d3-73fb04c40128] return evt.wait() [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] result = hub.switch() [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] return self.greenlet.switch() [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] self.f(*self.args, **self.kw) [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] raise exceptions.translate_fault(task_info.error) [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Faults: ['InvalidArgument'] [ 1282.804094] env[69227]: ERROR nova.compute.manager [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] [ 1282.805260] env[69227]: DEBUG nova.compute.utils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1282.805972] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.876s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1282.806174] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1282.806325] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1282.806828] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Build of instance be8dae7e-b829-455a-b8d3-73fb04c40128 was re-scheduled: A specified 
parameter was not correct: fileType [ 1282.806828] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1282.807205] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1282.807373] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1282.807541] env[69227]: DEBUG nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1282.807705] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1282.809844] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd023391-ea48-41d4-979b-fd0ef3d5ebbe {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.818117] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9769aa-a89a-49b8-948f-bd1b87830a69 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.831440] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4305c91c-1173-4802-82cf-81f87470a66a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.837456] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc4e0ef-95a6-4b24-a1eb-0db604e0c167 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.866016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180986MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1282.866222] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1282.866399] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.579039] env[69227]: DEBUG nova.network.neutron [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1284.080904] env[69227]: INFO nova.compute.manager [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Took 1.27 seconds to deallocate network for instance. [ 1284.401572] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance be8dae7e-b829-455a-b8d3-73fb04c40128 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1284.401755] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.401882] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402009] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402178] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402253] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402365] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402486] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402578] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.402687] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1284.905351] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1285.111877] env[69227]: INFO nova.scheduler.client.report [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Deleted allocations for instance be8dae7e-b829-455a-b8d3-73fb04c40128 [ 1285.408180] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1285.618852] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ae1012b4-2c7e-4b2d-8d6b-7b1e9e8323fd tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 515.681s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.620182] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 318.789s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.620407] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Acquiring lock "be8dae7e-b829-455a-b8d3-73fb04c40128-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.620616] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.620864] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.622768] env[69227]: INFO nova.compute.manager [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Terminating instance [ 1285.624407] env[69227]: DEBUG nova.compute.manager [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1285.624600] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1285.624860] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-08a05b4b-2d2b-4cc7-b797-e1cca8bbd386 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.634342] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f55967-1847-4a64-8b56-c19a82bb410b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.662084] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance be8dae7e-b829-455a-b8d3-73fb04c40128 could not be found. [ 1285.662299] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1285.662475] env[69227]: INFO nova.compute.manager [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1285.662748] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1285.662960] env[69227]: DEBUG nova.compute.manager [-] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1285.663069] env[69227]: DEBUG nova.network.neutron [-] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1285.911373] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1286.123379] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1286.180661] env[69227]: DEBUG nova.network.neutron [-] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1286.414629] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ce3124b0-4a17-470a-bdb1-164c53a26f37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1286.641772] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.683662] env[69227]: INFO nova.compute.manager [-] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] Took 1.02 seconds to deallocate network for instance. [ 1286.917274] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cccd529c-d780-404d-b222-7c0213363dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1287.419890] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7eaa0907-ab53-4499-a0d3-723a348279b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1287.707529] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ebbff990-9d60-4e71-8e72-0248768ef0af tempest-ServerTagsTestJSON-667446649 tempest-ServerTagsTestJSON-667446649-project-member] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.087s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.709028] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 115.653s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.709028] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: be8dae7e-b829-455a-b8d3-73fb04c40128] During sync_power_state the instance has a pending task (deleting). Skip. [ 1287.709028] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "be8dae7e-b829-455a-b8d3-73fb04c40128" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.923549] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 341cf5ae-4ae9-4dd3-a6a8-19eb95189221 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.426436] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1288.929264] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c2ccfca6-9703-429d-ba58-ee7b155edfce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1289.432729] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc2e221c-f064-4aea-8316-c2d2e01d0597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1289.935876] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d61f136b-edb3-4971-a8f5-c96dd86bd2f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1289.936180] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1289.936304] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1290.142094] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668d11df-8cee-4525-b980-b820f9bdd0bb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.149317] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7559fcb3-1b81-46b4-9179-8a4ab1d3a0cf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.177981] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13123111-b685-4052-bc85-a6fc30fbd039 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.184738] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-856bacb5-5cc1-403c-9c7e-c290e090d76d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.198120] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.701387] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1291.206848] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1291.207097] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.341s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.207370] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.566s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.208836] env[69227]: INFO nova.compute.claims [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1292.207509] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.424572] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd4f145-8354-4db8-8074-d43001ab45c1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.432110] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0a0f07-d263-4e83-a233-e71dde40c5f1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.461377] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a236ec-8d64-402a-be54-108299528ab9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.467853] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c7295a-f6bb-4818-b27e-50b168b0319c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.481152] env[69227]: DEBUG nova.compute.provider_tree [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.714096] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.714096] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1292.714096] env[69227]: DEBUG nova.compute.manager [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1292.984233] env[69227]: DEBUG nova.scheduler.client.report [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1293.218545] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.218832] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.218832] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.218978] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219137] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219261] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219383] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219502] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219620] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219736] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1293.219859] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1293.220077] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.220248] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.220395] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.220520] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1293.489470] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.490018] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1293.995047] env[69227]: DEBUG nova.compute.utils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1293.996338] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1293.996512] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1294.033685] env[69227]: DEBUG nova.policy [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7421a3df53b40469e3cc9c4a3a018af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87ac944928f942d1b1f3bffab69ea0dd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1294.329599] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Successfully created port: 53d507a2-f794-495a-9dd2-fd3989fe7a9b {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1294.499629] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1295.508538] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1295.534480] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1295.534716] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1295.534873] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1295.535073] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1295.535230] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1295.535372] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1295.535599] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1295.535779] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1295.535953] env[69227]: DEBUG nova.virt.hardware [None 
req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1295.536130] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1295.536303] env[69227]: DEBUG nova.virt.hardware [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1295.537275] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41f30b53-a7d9-4e88-984a-df2dd0fe77ab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.546167] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8880ba0c-38a2-4781-ae05-df49c2d109bc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.745086] env[69227]: DEBUG nova.compute.manager [req-7448d16c-22e8-4bc7-a2b3-cacb96b97bc7 req-897b3988-c2ba-457b-8e59-6f1f71aaf840 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Received event network-vif-plugged-53d507a2-f794-495a-9dd2-fd3989fe7a9b {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1295.745519] env[69227]: DEBUG oslo_concurrency.lockutils [req-7448d16c-22e8-4bc7-a2b3-cacb96b97bc7 req-897b3988-c2ba-457b-8e59-6f1f71aaf840 service nova] Acquiring lock "6085a4f8-f595-417c-9d33-22376a687be6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.745889] env[69227]: DEBUG oslo_concurrency.lockutils [req-7448d16c-22e8-4bc7-a2b3-cacb96b97bc7 req-897b3988-c2ba-457b-8e59-6f1f71aaf840 service nova] Lock "6085a4f8-f595-417c-9d33-22376a687be6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.746211] env[69227]: DEBUG oslo_concurrency.lockutils [req-7448d16c-22e8-4bc7-a2b3-cacb96b97bc7 req-897b3988-c2ba-457b-8e59-6f1f71aaf840 service nova] Lock "6085a4f8-f595-417c-9d33-22376a687be6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.746527] env[69227]: DEBUG nova.compute.manager [req-7448d16c-22e8-4bc7-a2b3-cacb96b97bc7 req-897b3988-c2ba-457b-8e59-6f1f71aaf840 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] No waiting events found dispatching network-vif-plugged-53d507a2-f794-495a-9dd2-fd3989fe7a9b {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1295.746833] env[69227]: WARNING nova.compute.manager [req-7448d16c-22e8-4bc7-a2b3-cacb96b97bc7 
req-897b3988-c2ba-457b-8e59-6f1f71aaf840 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Received unexpected event network-vif-plugged-53d507a2-f794-495a-9dd2-fd3989fe7a9b for instance with vm_state building and task_state spawning. [ 1295.837558] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Successfully updated port: 53d507a2-f794-495a-9dd2-fd3989fe7a9b {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1296.339731] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "refresh_cache-6085a4f8-f595-417c-9d33-22376a687be6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1296.339957] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquired lock "refresh_cache-6085a4f8-f595-417c-9d33-22376a687be6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1296.340133] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1296.885993] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1297.044075] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Updating instance_info_cache with network_info: [{"id": "53d507a2-f794-495a-9dd2-fd3989fe7a9b", "address": "fa:16:3e:2d:63:16", "network": {"id": "7b9add03-21e7-4d6a-b157-f9bf8da88d50", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-909126388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "87ac944928f942d1b1f3bffab69ea0dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d182e8eb-3f6d-4c76-a06e-133dd9b3cd30", "external-id": "nsx-vlan-transportzone-260", "segmentation_id": 260, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53d507a2-f7", "ovs_interfaceid": "53d507a2-f794-495a-9dd2-fd3989fe7a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1297.547246] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Releasing lock "refresh_cache-6085a4f8-f595-417c-9d33-22376a687be6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1297.547721] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance network_info: |[{"id": "53d507a2-f794-495a-9dd2-fd3989fe7a9b", "address": "fa:16:3e:2d:63:16", "network": {"id": "7b9add03-21e7-4d6a-b157-f9bf8da88d50", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-909126388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "87ac944928f942d1b1f3bffab69ea0dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d182e8eb-3f6d-4c76-a06e-133dd9b3cd30", "external-id": "nsx-vlan-transportzone-260", "segmentation_id": 260, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53d507a2-f7", "ovs_interfaceid": "53d507a2-f794-495a-9dd2-fd3989fe7a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1297.548224] env[69227]: DEBUG 
nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:63:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd182e8eb-3f6d-4c76-a06e-133dd9b3cd30', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53d507a2-f794-495a-9dd2-fd3989fe7a9b', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1297.555810] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Creating folder: Project (87ac944928f942d1b1f3bffab69ea0dd). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1297.556156] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93421844-f293-4d97-8dae-c6e1fb95a1d1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.569982] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Created folder: Project (87ac944928f942d1b1f3bffab69ea0dd) in parent group-v694623. [ 1297.570188] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Creating folder: Instances. Parent ref: group-v694690. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1297.570421] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7025d190-9cf8-4132-98df-56e2526ca645 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.579950] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Created folder: Instances in parent group-v694690. [ 1297.580209] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1297.580396] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1297.580592] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0589dd39-6713-463b-a9ed-24c11c7b9340 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.599683] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1297.599683] env[69227]: value = "task-3475080" [ 1297.599683] env[69227]: _type = "Task" [ 1297.599683] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.607345] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475080, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.772550] env[69227]: DEBUG nova.compute.manager [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Received event network-changed-53d507a2-f794-495a-9dd2-fd3989fe7a9b {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1297.772750] env[69227]: DEBUG nova.compute.manager [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Refreshing instance network info cache due to event network-changed-53d507a2-f794-495a-9dd2-fd3989fe7a9b. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1297.773015] env[69227]: DEBUG oslo_concurrency.lockutils [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] Acquiring lock "refresh_cache-6085a4f8-f595-417c-9d33-22376a687be6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.773172] env[69227]: DEBUG oslo_concurrency.lockutils [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] Acquired lock "refresh_cache-6085a4f8-f595-417c-9d33-22376a687be6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.773334] env[69227]: DEBUG nova.network.neutron [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Refreshing network info cache for port 53d507a2-f794-495a-9dd2-fd3989fe7a9b {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1298.109992] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475080, 'name': CreateVM_Task, 'duration_secs': 0.308049} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.110360] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1298.110998] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.111119] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1298.111446] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1298.111715] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bbf71f7-4092-4a19-a10b-06b16a6d8296 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.116592] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Waiting for the task: (returnval){ [ 1298.116592] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524abe1b-647f-993e-a37a-ee6faed35275" [ 1298.116592] env[69227]: _type = "Task" [ 1298.116592] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.124886] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524abe1b-647f-993e-a37a-ee6faed35275, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.551965] env[69227]: DEBUG nova.network.neutron [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Updated VIF entry in instance network info cache for port 53d507a2-f794-495a-9dd2-fd3989fe7a9b. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1298.552545] env[69227]: DEBUG nova.network.neutron [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Updating instance_info_cache with network_info: [{"id": "53d507a2-f794-495a-9dd2-fd3989fe7a9b", "address": "fa:16:3e:2d:63:16", "network": {"id": "7b9add03-21e7-4d6a-b157-f9bf8da88d50", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-909126388-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "87ac944928f942d1b1f3bffab69ea0dd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d182e8eb-3f6d-4c76-a06e-133dd9b3cd30", "external-id": "nsx-vlan-transportzone-260", "segmentation_id": 260, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53d507a2-f7", "ovs_interfaceid": "53d507a2-f794-495a-9dd2-fd3989fe7a9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.631230] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1298.631556] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1298.631799] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1298.978309] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.978540] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.055468] env[69227]: DEBUG oslo_concurrency.lockutils [req-6add2cdd-37b1-4c56-a76b-f90c42e3fa65 req-c64c6616-9e96-42bf-86ce-8b2cb3017d27 service nova] Releasing lock "refresh_cache-6085a4f8-f595-417c-9d33-22376a687be6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.348525] env[69227]: DEBUG oslo_concurrency.lockutils [None req-45a8454b-3efd-4641-9fe6-385042711ded tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] Acquiring lock "9b7a3c5c-96f1-461f-8bca-50c44315d737" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.348797] env[69227]: DEBUG oslo_concurrency.lockutils [None req-45a8454b-3efd-4641-9fe6-385042711ded tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] Lock "9b7a3c5c-96f1-461f-8bca-50c44315d737" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1307.536141] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d9a7e8ef-5f4f-4726-94be-c544b8be83b8 tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] Acquiring lock "c540e175-7485-4384-9c45-f8a6b0c64b7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1307.536381] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d9a7e8ef-5f4f-4726-94be-c544b8be83b8 tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] Lock "c540e175-7485-4384-9c45-f8a6b0c64b7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1329.906305] env[69227]: WARNING oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1329.906305] env[69227]: ERROR oslo_vmware.rw_handles [ 1329.906872] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1329.908825] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1329.909080] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Copying Virtual Disk [datastore2] vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/27ea053a-0be7-4232-aed7-bdcf5f513251/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1329.909403] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d19d732d-f1f1-40f8-bfc4-fba2ec0b9e47 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.917015] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 1329.917015] env[69227]: value = "task-3475081" [ 1329.917015] env[69227]: _type = "Task" [ 1329.917015] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.924631] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': task-3475081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.427029] env[69227]: DEBUG oslo_vmware.exceptions [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1330.427357] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1330.427941] env[69227]: ERROR nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1330.427941] env[69227]: Faults: ['InvalidArgument'] [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Traceback (most recent call last): [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] yield resources [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self.driver.spawn(context, instance, image_meta, [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self._fetch_image_if_missing(context, vi) [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] image_cache(vi, tmp_image_ds_loc) [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] vm_util.copy_virtual_disk( [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] session._wait_for_task(vmdk_copy_task) [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] return self.wait_for_task(task_ref) [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] return evt.wait() [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] result = hub.switch() [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] return self.greenlet.switch() [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self.f(*self.args, **self.kw) [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] raise exceptions.translate_fault(task_info.error) [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Faults: ['InvalidArgument'] [ 1330.427941] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] [ 1330.428821] env[69227]: INFO nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Terminating instance [ 1330.429760] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1330.429967] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.430213] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25b87238-ba2d-4ce0-945d-e6d2f5456836 
{{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.432294] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1330.432488] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1330.433193] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bad285e-7f99-41ae-ac97-d55b5393dfc5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.440114] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1330.441018] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3173d37-d364-4d82-8c51-dfdc173909ab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.442341] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.442514] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1330.443166] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2e1d184-bec8-4ec9-b238-ed8f19e6476a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.447722] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 1330.447722] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524bd892-dcf9-5e53-b509-50ef02ab0d52" [ 1330.447722] env[69227]: _type = "Task" [ 1330.447722] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.457272] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]524bd892-dcf9-5e53-b509-50ef02ab0d52, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.502243] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1330.502454] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1330.502632] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleting the datastore file [datastore2] fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1330.502883] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6ec6830-7c0a-4b7f-9413-45d939193e83 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.509043] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 1330.509043] env[69227]: value = "task-3475083" [ 1330.509043] env[69227]: _type = "Task" [ 1330.509043] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.516555] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': task-3475083, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.962987] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1330.963443] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating directory with path [datastore2] vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1330.963744] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-311197d9-9e76-4276-934d-6e27180a2573 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.976216] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Created directory with path [datastore2] vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1330.976495] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Fetch image to [datastore2] vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1330.976728] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1330.977791] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd3944d-3a4f-45b0-b5f9-adb9901e1d94 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.985564] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb2bd27-71b7-4ea4-8fdf-5c59ae6efe9d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.994474] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd485a6-73c5-4b5a-8758-ef0c6d0b16b4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.028495] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-60578a1c-a055-4408-90b5-c5e7eb122cb8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.035152] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': task-3475083, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075103} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.036538] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1331.036730] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1331.036898] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1331.037076] env[69227]: INFO nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1331.038742] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b930a385-68a4-4437-8f17-9177b5b2edaa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.040510] env[69227]: DEBUG nova.compute.claims [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1331.040679] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1331.040904] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1331.062065] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1331.116485] env[69227]: DEBUG oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1331.177258] env[69227]: DEBUG oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1331.177464] env[69227]: DEBUG oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1331.803016] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de156f49-5d6c-4464-a8b4-63afc4118925 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.810665] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f1f615-7e62-4653-b519-5eb5140694f2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.840040] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd308898-203b-49fb-991b-33ff03b304eb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.846912] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b326851f-303e-44d6-a70b-1b5cdf4803a2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.860631] env[69227]: DEBUG nova.compute.provider_tree [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1332.363775] env[69227]: DEBUG nova.scheduler.client.report [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1332.869485] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.828s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1332.870068] env[69227]: ERROR nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1332.870068] env[69227]: Faults: ['InvalidArgument'] [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Traceback (most recent call last): [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1332.870068] env[69227]: 
ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self.driver.spawn(context, instance, image_meta, [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self._fetch_image_if_missing(context, vi) [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] image_cache(vi, tmp_image_ds_loc) [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] vm_util.copy_virtual_disk( [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] session._wait_for_task(vmdk_copy_task) [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] return self.wait_for_task(task_ref) [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] return evt.wait() [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] result = hub.switch() [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] return self.greenlet.switch() [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] self.f(*self.args, **self.kw) [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] raise exceptions.translate_fault(task_info.error) [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Faults: ['InvalidArgument'] [ 1332.870068] env[69227]: ERROR nova.compute.manager [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] [ 1332.870857] env[69227]: DEBUG nova.compute.utils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1332.872645] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Build of instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c was re-scheduled: A specified parameter was not correct: fileType [ 1332.872645] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1332.873027] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1332.873214] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1332.873441] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1332.873610] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1333.627718] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.130918] env[69227]: INFO nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Took 1.26 seconds to deallocate network for instance. [ 1334.427393] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1335.161794] env[69227]: INFO nova.scheduler.client.report [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleted allocations for instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c [ 1335.670487] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 523.799s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.671919] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 326.237s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.672162] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1335.672370] env[69227]: DEBUG 
oslo_concurrency.lockutils [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1335.672550] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.674443] env[69227]: INFO nova.compute.manager [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Terminating instance [ 1335.676064] env[69227]: DEBUG nova.compute.manager [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1335.676264] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1335.676521] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74296f89-0abb-42c9-8ba1-e1de31a28daf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.685683] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6389dc2e-c0c2-4000-b753-1be8d421d5e2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.713082] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c could not be found. [ 1335.713315] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1335.713498] env[69227]: INFO nova.compute.manager [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1335.713746] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1335.713969] env[69227]: DEBUG nova.compute.manager [-] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1335.714077] env[69227]: DEBUG nova.network.neutron [-] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1336.174945] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1336.239680] env[69227]: DEBUG nova.network.neutron [-] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.696095] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.696432] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.697905] env[69227]: INFO nova.compute.claims [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1336.737129] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.737381] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1336.742364] env[69227]: INFO nova.compute.manager [-] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] Took 1.03 seconds to deallocate network for instance. [ 1336.769887] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "0d6d7f24-b7e5-419d-9fef-c9e0d34eac12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1336.770116] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "0d6d7f24-b7e5-419d-9fef-c9e0d34eac12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.767997] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8bf66e98-b853-49ed-b6dc-bd75918420b8 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.096s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.769383] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 165.713s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1337.769383] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1337.769383] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "fc8833e5-ab33-4ff5-9c05-5ef9ee2d8f4c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1337.951160] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33071bdb-1168-4baa-a6f0-092adc085f81 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.959088] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9901f3b-97ab-4ddd-8e91-3a0c24af4151 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.991573] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ef3c5a-7325-40a2-b1f1-fe5a0d55b2d3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.998844] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336619fc-c062-4503-989d-8aa83d367cef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.012012] env[69227]: DEBUG nova.compute.provider_tree [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.427727] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1338.515066] env[69227]: DEBUG nova.scheduler.client.report [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1339.020428] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.020953] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 
tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1339.526281] env[69227]: DEBUG nova.compute.utils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1339.527781] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1339.528073] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1339.565936] env[69227]: DEBUG nova.policy [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f18490b43e2a4f89b121b6a682b7e850', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3eeddfd8cf394d49bd88536877399fff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1339.855547] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Successfully created port: a9fbdcde-aa59-4ccb-abc5-384f6c54142a {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.032769] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1341.043123] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1341.068718] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1341.068969] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1341.069204] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1341.069319] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1341.069465] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1341.069622] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1341.069811] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1341.070071] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1341.070255] env[69227]: DEBUG 
nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1341.070421] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1341.070596] env[69227]: DEBUG nova.virt.hardware [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1341.071503] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a605e95b-bf92-4402-80c3-e1b560db9aeb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.079459] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6f58ad-13a7-41f2-b2ea-c6bda5f518ca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.230560] env[69227]: DEBUG nova.compute.manager [req-f6933c6c-0c9c-40bb-9481-a9bd3e92ebd7 req-fb5a756a-d93e-4eaf-a26d-a734278f00d2 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Received event network-vif-plugged-a9fbdcde-aa59-4ccb-abc5-384f6c54142a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1341.230832] env[69227]: DEBUG oslo_concurrency.lockutils [req-f6933c6c-0c9c-40bb-9481-a9bd3e92ebd7 req-fb5a756a-d93e-4eaf-a26d-a734278f00d2 service nova] Acquiring lock "891a992b-5cbb-404e-8225-3ada55327def-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.230991] env[69227]: DEBUG oslo_concurrency.lockutils [req-f6933c6c-0c9c-40bb-9481-a9bd3e92ebd7 req-fb5a756a-d93e-4eaf-a26d-a734278f00d2 service nova] Lock "891a992b-5cbb-404e-8225-3ada55327def-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.231180] env[69227]: DEBUG oslo_concurrency.lockutils [req-f6933c6c-0c9c-40bb-9481-a9bd3e92ebd7 req-fb5a756a-d93e-4eaf-a26d-a734278f00d2 service nova] Lock "891a992b-5cbb-404e-8225-3ada55327def-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.231345] env[69227]: DEBUG nova.compute.manager [req-f6933c6c-0c9c-40bb-9481-a9bd3e92ebd7 req-fb5a756a-d93e-4eaf-a26d-a734278f00d2 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] No waiting events found dispatching network-vif-plugged-a9fbdcde-aa59-4ccb-abc5-384f6c54142a {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1341.231509] env[69227]: WARNING nova.compute.manager 
[req-f6933c6c-0c9c-40bb-9481-a9bd3e92ebd7 req-fb5a756a-d93e-4eaf-a26d-a734278f00d2 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Received unexpected event network-vif-plugged-a9fbdcde-aa59-4ccb-abc5-384f6c54142a for instance with vm_state building and task_state spawning. [ 1341.320197] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Successfully updated port: a9fbdcde-aa59-4ccb-abc5-384f6c54142a {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1341.427110] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.427356] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1341.826845] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "refresh_cache-891a992b-5cbb-404e-8225-3ada55327def" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.826969] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired lock "refresh_cache-891a992b-5cbb-404e-8225-3ada55327def" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1341.827166] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1341.930604] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.930900] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1341.931117] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1341.931379] env[69227]: DEBUG nova.compute.resource_tracker [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1341.932265] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb8edba-e3d1-4c91-b32c-a6c1e57025ad {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.940210] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d572022a-88b9-4463-93d9-3061c653bc6d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.954127] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160d5eba-ebe2-4161-b4c0-f0fb6f280007 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.960451] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f863ca0-727c-41a9-be03-2a96f5c8e449 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.988392] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180981MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1341.988538] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1341.988742] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1342.362209] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1342.491499] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Updating instance_info_cache with network_info: [{"id": "a9fbdcde-aa59-4ccb-abc5-384f6c54142a", "address": "fa:16:3e:da:69:48", "network": {"id": "9d87725a-9e64-45b4-9a2c-d46e5508375b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-993429914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3eeddfd8cf394d49bd88536877399fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fbdcde-aa", "ovs_interfaceid": "a9fbdcde-aa59-4ccb-abc5-384f6c54142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.998984] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Releasing lock "refresh_cache-891a992b-5cbb-404e-8225-3ada55327def" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1342.999346] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance network_info: |[{"id": "a9fbdcde-aa59-4ccb-abc5-384f6c54142a", "address": "fa:16:3e:da:69:48", "network": {"id": "9d87725a-9e64-45b4-9a2c-d46e5508375b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-993429914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3eeddfd8cf394d49bd88536877399fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fbdcde-aa", "ovs_interfaceid": "a9fbdcde-aa59-4ccb-abc5-384f6c54142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1343.002964] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:da:69:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '63e45f61-1d9b-4660-8d25-89fb68d45cd3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9fbdcde-aa59-4ccb-abc5-384f6c54142a', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1343.010827] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1343.011064] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1343.011314] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35da36f6-ff07-4338-898b-f7ed35700205 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.027109] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027255] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027380] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027501] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027614] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027727] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027836] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.027957] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.028118] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.028242] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1343.035757] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1343.035757] env[69227]: value = "task-3475084" [ 1343.035757] env[69227]: _type = "Task" [ 1343.035757] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.046863] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475084, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.258868] env[69227]: DEBUG nova.compute.manager [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Received event network-changed-a9fbdcde-aa59-4ccb-abc5-384f6c54142a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1343.259083] env[69227]: DEBUG nova.compute.manager [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Refreshing instance network info cache due to event network-changed-a9fbdcde-aa59-4ccb-abc5-384f6c54142a. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1343.259300] env[69227]: DEBUG oslo_concurrency.lockutils [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] Acquiring lock "refresh_cache-891a992b-5cbb-404e-8225-3ada55327def" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.259476] env[69227]: DEBUG oslo_concurrency.lockutils [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] Acquired lock "refresh_cache-891a992b-5cbb-404e-8225-3ada55327def" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1343.259628] env[69227]: DEBUG nova.network.neutron [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Refreshing network info cache for port a9fbdcde-aa59-4ccb-abc5-384f6c54142a {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1343.531201] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1343.546174] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475084, 'name': CreateVM_Task, 'duration_secs': 0.307472} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.546401] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1343.547065] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1343.547233] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1343.547543] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1343.547819] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9714843b-f7fa-4b54-a16b-b8ad7492c2b8 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.552295] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1343.552295] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b44e2f-6123-273f-05ef-e629a189dd12" [ 1343.552295] env[69227]: _type = "Task" [ 1343.552295] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.559621] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b44e2f-6123-273f-05ef-e629a189dd12, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.942582] env[69227]: DEBUG nova.network.neutron [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Updated VIF entry in instance network info cache for port a9fbdcde-aa59-4ccb-abc5-384f6c54142a. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1343.942934] env[69227]: DEBUG nova.network.neutron [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Updating instance_info_cache with network_info: [{"id": "a9fbdcde-aa59-4ccb-abc5-384f6c54142a", "address": "fa:16:3e:da:69:48", "network": {"id": "9d87725a-9e64-45b4-9a2c-d46e5508375b", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-993429914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3eeddfd8cf394d49bd88536877399fff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "63e45f61-1d9b-4660-8d25-89fb68d45cd3", "external-id": "nsx-vlan-transportzone-43", "segmentation_id": 43, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9fbdcde-aa", "ovs_interfaceid": "a9fbdcde-aa59-4ccb-abc5-384f6c54142a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1344.034090] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ce3124b0-4a17-470a-bdb1-164c53a26f37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1344.062566] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.062792] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1344.063013] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.445511] env[69227]: DEBUG oslo_concurrency.lockutils [req-b537f070-b202-4295-9359-60093414eb4d req-f9885c2a-3e40-40cd-8dca-df8cfe82fc69 service nova] Releasing lock "refresh_cache-891a992b-5cbb-404e-8225-3ada55327def" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1344.536651] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cccd529c-d780-404d-b222-7c0213363dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1345.039858] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7eaa0907-ab53-4499-a0d3-723a348279b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1345.542831] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 341cf5ae-4ae9-4dd3-a6a8-19eb95189221 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1346.046432] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1346.549533] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c2ccfca6-9703-429d-ba58-ee7b155edfce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1347.052801] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc2e221c-f064-4aea-8316-c2d2e01d0597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1347.555667] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d61f136b-edb3-4971-a8f5-c96dd86bd2f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1348.060796] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1348.562527] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9b7a3c5c-96f1-461f-8bca-50c44315d737 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.066080] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c540e175-7485-4384-9c45-f8a6b0c64b7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1349.569280] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1350.073043] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1350.073184] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1350.073273] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1350.330749] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc5ee04-f5bc-46bd-9fd8-8809cdba37f8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.338617] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973b5f28-b0e6-492d-b92e-cbf6d0e7e3cc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.368826] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c909baf-7731-49c9-84ce-83a5cb2cc403 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.375973] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15fc055-16a1-4b56-ad30-3c06d7add86a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.389941] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.893893] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1351.398342] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] 
Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1351.398565] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.410s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.393897] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.394228] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.394274] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1352.394385] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.899916] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.901350] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.901350] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.901615] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1352.901879] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1352.902911] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.902911] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.902911] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1352.902911] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1355.587120] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1355.587363] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1379.396443] env[69227]: WARNING oslo_vmware.rw_handles [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1379.396443] env[69227]: ERROR oslo_vmware.rw_handles [ 1379.397078] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1379.399038] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1379.399314] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 
tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Copying Virtual Disk [datastore2] vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/05d75c0d-35c4-470c-881a-30454422d66f/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1379.399599] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16fce218-3444-4a99-81cc-432dfd42f123 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.408531] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 1379.408531] env[69227]: value = "task-3475085" [ 1379.408531] env[69227]: _type = "Task" [ 1379.408531] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.416336] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': task-3475085, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.919285] env[69227]: DEBUG oslo_vmware.exceptions [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1379.919580] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1379.920110] env[69227]: ERROR nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1379.920110] env[69227]: Faults: ['InvalidArgument'] [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Traceback (most recent call last): [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] yield resources [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self.driver.spawn(context, instance, image_meta, [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self._fetch_image_if_missing(context, vi) [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] image_cache(vi, tmp_image_ds_loc) [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] vm_util.copy_virtual_disk( [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] session._wait_for_task(vmdk_copy_task) [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] return self.wait_for_task(task_ref) [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] return evt.wait() [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] result = hub.switch() [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] return self.greenlet.switch() [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self.f(*self.args, **self.kw) [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] raise exceptions.translate_fault(task_info.error) [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Faults: ['InvalidArgument'] [ 1379.920110] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] [ 1379.920927] env[69227]: INFO nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Terminating instance [ 1379.921882] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1379.922096] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1379.922330] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-585f18da-6e83-47ce-b5b7-3942b86bc18a 
{{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.924629] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1379.924825] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1379.925553] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caac3d55-922c-4c76-8ec6-957e58da0b44 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.931986] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1379.932207] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b3260fa-de76-4227-bbcf-2abb4f203b0d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.934317] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1379.934489] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1379.935433] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2c0cc92-c998-48ed-8ee9-ee5aa8462fed {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.940207] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Waiting for the task: (returnval){ [ 1379.940207] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52215843-ce60-116c-9408-dbc2ece1becd" [ 1379.940207] env[69227]: _type = "Task" [ 1379.940207] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.947245] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52215843-ce60-116c-9408-dbc2ece1becd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.004502] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1380.004784] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1380.004973] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleting the datastore file [datastore2] 3a8be11c-6a0e-4dbb-97c0-4290a2716487 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1380.005261] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9651effe-0f38-4afe-b2b8-f16a77e99390 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.010923] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for the task: (returnval){ [ 1380.010923] env[69227]: value = "task-3475087" [ 1380.010923] env[69227]: _type = "Task" [ 1380.010923] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.018566] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': task-3475087, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.450713] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1380.451046] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Creating directory with path [datastore2] vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1380.451204] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82e63aeb-38c7-49e7-9627-7e18916f92af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.463050] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Created directory with path [datastore2] vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1380.463050] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Fetch image to [datastore2] vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1380.463050] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1380.463369] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81156e9f-797b-4389-a966-ee7ac7ad03a4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.470905] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604caf32-5d7a-42c2-9728-e62384ae9060 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.480236] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a94d59-65f8-48bf-9ecb-7ada0677dc61 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.510148] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e0f7d6fe-056f-4aed-a13d-0c7ae855d379 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.520882] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f06a0f94-ae5a-48bb-afee-341ed1b7464d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.522507] env[69227]: DEBUG oslo_vmware.api [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Task: {'id': task-3475087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078221} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.522745] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1380.522931] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1380.523112] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1380.523287] env[69227]: INFO nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Took 0.60 seconds to destroy the instance on the hypervisor. 
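The entries above trace the usual oslo.vmware task-polling pattern: Nova invokes FileManager.DeleteDatastoreFile_Task against vCenter, the API layer polls the task (the "progress is 0%" lines), and control returns once the task reports success (duration_secs: 0.078221 here). Below is a minimal Python sketch of that pattern, not the actual Nova ds_util code; it assumes an already-established oslo_vmware.api.VMwareAPISession, and the function name and arguments (delete_datastore_path, datacenter_ref, ds_path) are illustrative.

def delete_datastore_path(session, datacenter_ref, ds_path):
    """Delete a datastore path via vCenter's FileManager and block on the task."""
    # The FileManager managed object comes from the service content of the
    # established vSphere session.
    file_manager = session.vim.service_content.fileManager
    # DeleteDatastoreFile_Task takes the datastore path, e.g.
    # "[datastore2] <instance uuid>", and the owning datacenter reference.
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager,
                              name=ds_path,
                              datacenter=datacenter_ref)
    # wait_for_task() drives the polling seen in the log and raises a
    # translated VimFaultException if the task ends in an error state.
    session.wait_for_task(task)

The same wait_for_task path is what surfaced the InvalidArgument "fileType" fault in the traceback above, where the task being awaited was the CopyVirtualDisk_Task issued while caching the sparse image.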
[ 1380.525390] env[69227]: DEBUG nova.compute.claims [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1380.525562] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1380.525881] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1380.548154] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1380.705144] env[69227]: DEBUG oslo_vmware.rw_handles [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1380.768895] env[69227]: DEBUG oslo_vmware.rw_handles [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1380.769115] env[69227]: DEBUG oslo_vmware.rw_handles [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1381.307708] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c5b0d4-4cfc-4f80-a862-070f369dae23 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.315799] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-718b03f5-ba5a-421e-b8d4-ccd7b49197d3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.346091] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c0d060-e2d8-4a33-a4e9-db2abb5e509e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.352898] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76592a82-9371-40db-a3a2-880a9ec7130d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.365636] env[69227]: DEBUG nova.compute.provider_tree [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1381.869521] env[69227]: DEBUG nova.scheduler.client.report [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1382.374441] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.848s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1382.375102] env[69227]: ERROR nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1382.375102] env[69227]: Faults: ['InvalidArgument'] [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Traceback (most recent call last): [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1382.375102] env[69227]: 
ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self.driver.spawn(context, instance, image_meta, [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self._fetch_image_if_missing(context, vi) [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] image_cache(vi, tmp_image_ds_loc) [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] vm_util.copy_virtual_disk( [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] session._wait_for_task(vmdk_copy_task) [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] return self.wait_for_task(task_ref) [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] return evt.wait() [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] result = hub.switch() [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] return self.greenlet.switch() [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] self.f(*self.args, **self.kw) [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] raise exceptions.translate_fault(task_info.error) [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Faults: ['InvalidArgument'] [ 1382.375102] env[69227]: ERROR nova.compute.manager [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] [ 1382.375867] env[69227]: DEBUG nova.compute.utils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1382.377654] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Build of instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 was re-scheduled: A specified parameter was not correct: fileType [ 1382.377654] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1382.378049] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1382.378272] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1382.378392] env[69227]: DEBUG nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1382.378552] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1383.162966] env[69227]: DEBUG nova.network.neutron [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1383.666226] env[69227]: INFO nova.compute.manager [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Took 1.29 seconds to deallocate network for instance. [ 1384.709759] env[69227]: INFO nova.scheduler.client.report [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleted allocations for instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 [ 1385.218101] env[69227]: DEBUG oslo_concurrency.lockutils [None req-101cda96-da71-4a65-8c46-c1c88ed2b480 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 573.307s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.219423] env[69227]: DEBUG oslo_concurrency.lockutils [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 376.111s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.219691] env[69227]: DEBUG oslo_concurrency.lockutils [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1385.219904] env[69227]: DEBUG oslo_concurrency.lockutils [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1385.220084] env[69227]: DEBUG oslo_concurrency.lockutils [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1385.221932] env[69227]: INFO nova.compute.manager [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Terminating instance [ 1385.223663] env[69227]: DEBUG nova.compute.manager [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1385.223871] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1385.224140] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7568bab4-9a92-4f27-b6f1-8ab6602df8bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.233427] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0066c06d-3ec3-43d7-b390-171d4ff4ed9e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.264464] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3a8be11c-6a0e-4dbb-97c0-4290a2716487 could not be found. [ 1385.264664] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1385.264838] env[69227]: INFO nova.compute.manager [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1385.265182] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1385.265435] env[69227]: DEBUG nova.compute.manager [-] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1385.265531] env[69227]: DEBUG nova.network.neutron [-] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1385.723136] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1385.783385] env[69227]: DEBUG nova.network.neutron [-] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.245150] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1386.245429] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1386.246897] env[69227]: INFO nova.compute.claims [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1386.285954] env[69227]: INFO nova.compute.manager [-] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] Took 1.02 seconds to deallocate network for instance. 
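The claim that succeeds above ("Claim successful on node domain-c8...") is evaluated against the provider inventory the report client keeps logging for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b (VCPU total=48 with allocation_ratio=4.0, MEMORY_MB total=196590 with 512 reserved, DISK_GB total=400). Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio. A small worked example with the values copied from the log; the helper and its name are illustrative only:

# Inventory values copied from the "Inventory has not changed for provider ..." lines.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def usable_capacity(inv):
    """Schedulable capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (spec['total'] - spec['reserved']) * spec['allocation_ratio']
            for rc, spec in inv.items()}

print(usable_capacity(inventory))
# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}

This is why a 48-core node can keep accepting 1-vCPU claims like the m1.nano instances in this run: the 4.0 allocation ratio makes 192 vCPUs worth of capacity visible to the scheduler.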
[ 1387.310759] env[69227]: DEBUG oslo_concurrency.lockutils [None req-102ca924-0025-4d83-9023-9eee819bda15 tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.091s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.311659] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 215.255s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.311854] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 3a8be11c-6a0e-4dbb-97c0-4290a2716487] During sync_power_state the instance has a pending task (deleting). Skip. [ 1387.312068] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "3a8be11c-6a0e-4dbb-97c0-4290a2716487" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.497044] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271786e9-2f38-453c-b885-fc09107bd288 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.504599] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9b030c-257b-42da-a820-34146894c1ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.535343] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bd2f48-77e6-484e-aca5-2b12895cec23 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.541961] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2c38e9-7c17-4f1d-b45c-2b56b1396d38 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.554632] env[69227]: DEBUG nova.compute.provider_tree [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1388.058172] env[69227]: DEBUG nova.scheduler.client.report [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 
1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1388.563530] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.066256] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "b22bc162-722d-4409-8f3f-7a5bcfec652c" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.066521] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "b22bc162-722d-4409-8f3f-7a5bcfec652c" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.569295] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "b22bc162-722d-4409-8f3f-7a5bcfec652c" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.569998] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1390.075044] env[69227]: DEBUG nova.compute.utils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1390.076452] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1390.076631] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1390.112075] env[69227]: DEBUG nova.policy [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3bab6c4841f14dcfaf5928288f1a11b5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '10a1bb2752034162a3b3b789c48406f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1390.579927] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1390.600406] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Successfully created port: 375f87ab-8f9e-462e-8d59-5846ba52515a {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1391.591771] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1391.618190] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1391.618468] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1391.618624] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1391.618805] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1391.618951] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1391.619132] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1391.619346] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1391.619504] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1391.619669] env[69227]: DEBUG nova.virt.hardware [None 
req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1391.619828] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1391.620017] env[69227]: DEBUG nova.virt.hardware [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1391.620880] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fc286db-afa1-4a21-86da-2bda08e22e31 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.629799] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1345bd75-c42f-4801-9a12-8bb939ba5217 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.973070] env[69227]: DEBUG nova.compute.manager [req-87bb4dcb-ab92-4545-889f-aa716af1ff31 req-3c2b5f10-0eb9-4a47-9f0e-b603335cac1a service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Received event network-vif-plugged-375f87ab-8f9e-462e-8d59-5846ba52515a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1391.973299] env[69227]: DEBUG oslo_concurrency.lockutils [req-87bb4dcb-ab92-4545-889f-aa716af1ff31 req-3c2b5f10-0eb9-4a47-9f0e-b603335cac1a service nova] Acquiring lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1391.973508] env[69227]: DEBUG oslo_concurrency.lockutils [req-87bb4dcb-ab92-4545-889f-aa716af1ff31 req-3c2b5f10-0eb9-4a47-9f0e-b603335cac1a service nova] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.973673] env[69227]: DEBUG oslo_concurrency.lockutils [req-87bb4dcb-ab92-4545-889f-aa716af1ff31 req-3c2b5f10-0eb9-4a47-9f0e-b603335cac1a service nova] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1391.973836] env[69227]: DEBUG nova.compute.manager [req-87bb4dcb-ab92-4545-889f-aa716af1ff31 req-3c2b5f10-0eb9-4a47-9f0e-b603335cac1a service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] No waiting events found dispatching network-vif-plugged-375f87ab-8f9e-462e-8d59-5846ba52515a {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1391.974214] env[69227]: WARNING nova.compute.manager [req-87bb4dcb-ab92-4545-889f-aa716af1ff31 
req-3c2b5f10-0eb9-4a47-9f0e-b603335cac1a service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Received unexpected event network-vif-plugged-375f87ab-8f9e-462e-8d59-5846ba52515a for instance with vm_state building and task_state spawning. [ 1392.071984] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Successfully updated port: 375f87ab-8f9e-462e-8d59-5846ba52515a {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1392.574773] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "refresh_cache-af538b0d-b8c6-4f93-81e7-8f27b8a96735" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1392.574943] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquired lock "refresh_cache-af538b0d-b8c6-4f93-81e7-8f27b8a96735" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1392.575262] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1393.111192] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1393.238267] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Updating instance_info_cache with network_info: [{"id": "375f87ab-8f9e-462e-8d59-5846ba52515a", "address": "fa:16:3e:2b:27:e8", "network": {"id": "78f39790-b194-404c-a3b3-c9bfa0705b7d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1562728856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10a1bb2752034162a3b3b789c48406f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap375f87ab-8f", "ovs_interfaceid": "375f87ab-8f9e-462e-8d59-5846ba52515a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1393.742439] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Releasing lock "refresh_cache-af538b0d-b8c6-4f93-81e7-8f27b8a96735" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1393.742766] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance network_info: |[{"id": "375f87ab-8f9e-462e-8d59-5846ba52515a", "address": "fa:16:3e:2b:27:e8", "network": {"id": "78f39790-b194-404c-a3b3-c9bfa0705b7d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1562728856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10a1bb2752034162a3b3b789c48406f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap375f87ab-8f", "ovs_interfaceid": "375f87ab-8f9e-462e-8d59-5846ba52515a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1393.743206] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:27:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '667a2e97-c1be-421d-9941-6b84c2629b43', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '375f87ab-8f9e-462e-8d59-5846ba52515a', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1393.750580] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Creating folder: Project (10a1bb2752034162a3b3b789c48406f5). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1393.750841] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea9a3fb5-b063-45f7-b478-8109875f3a81 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.762785] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Created folder: Project (10a1bb2752034162a3b3b789c48406f5) in parent group-v694623. [ 1393.762973] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Creating folder: Instances. Parent ref: group-v694694. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1393.763217] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-81b647cb-7ebf-44ad-ab1e-fa9aae99b1e9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.772292] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Created folder: Instances in parent group-v694694. [ 1393.772517] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1393.772697] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1393.772882] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5b5050b-e4c6-48c8-8abc-a7bc68de5a33 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.791247] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1393.791247] env[69227]: value = "task-3475090" [ 1393.791247] env[69227]: _type = "Task" [ 1393.791247] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.798544] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475090, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.002215] env[69227]: DEBUG nova.compute.manager [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Received event network-changed-375f87ab-8f9e-462e-8d59-5846ba52515a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1394.002467] env[69227]: DEBUG nova.compute.manager [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Refreshing instance network info cache due to event network-changed-375f87ab-8f9e-462e-8d59-5846ba52515a. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1394.002697] env[69227]: DEBUG oslo_concurrency.lockutils [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] Acquiring lock "refresh_cache-af538b0d-b8c6-4f93-81e7-8f27b8a96735" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.002996] env[69227]: DEBUG oslo_concurrency.lockutils [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] Acquired lock "refresh_cache-af538b0d-b8c6-4f93-81e7-8f27b8a96735" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.003231] env[69227]: DEBUG nova.network.neutron [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Refreshing network info cache for port 375f87ab-8f9e-462e-8d59-5846ba52515a {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1394.300933] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475090, 'name': CreateVM_Task, 'duration_secs': 0.28184} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.301257] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1394.301923] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1394.302120] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1394.302432] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1394.302684] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9cfd5e8-fb3e-45fa-80ca-fb35b72c1463 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.307923] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Waiting for the task: (returnval){ [ 1394.307923] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5221995d-6c39-488d-e222-bfd47f1d5bff" [ 1394.307923] env[69227]: _type = "Task" [ 1394.307923] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.315164] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5221995d-6c39-488d-e222-bfd47f1d5bff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.720547] env[69227]: DEBUG nova.network.neutron [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Updated VIF entry in instance network info cache for port 375f87ab-8f9e-462e-8d59-5846ba52515a. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1394.720911] env[69227]: DEBUG nova.network.neutron [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Updating instance_info_cache with network_info: [{"id": "375f87ab-8f9e-462e-8d59-5846ba52515a", "address": "fa:16:3e:2b:27:e8", "network": {"id": "78f39790-b194-404c-a3b3-c9bfa0705b7d", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1562728856-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "10a1bb2752034162a3b3b789c48406f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "667a2e97-c1be-421d-9941-6b84c2629b43", "external-id": "nsx-vlan-transportzone-484", "segmentation_id": 484, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap375f87ab-8f", "ovs_interfaceid": "375f87ab-8f9e-462e-8d59-5846ba52515a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1394.818791] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1394.819016] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1394.819249] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.223590] env[69227]: DEBUG oslo_concurrency.lockutils [req-a14cd23f-0260-42d2-940b-498f70bfe245 req-4a51588d-7110-4a46-9f33-69a587c820fb service nova] Releasing lock "refresh_cache-af538b0d-b8c6-4f93-81e7-8f27b8a96735" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1396.427662] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1399.427287] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.422466] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.427085] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.427273] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1402.930700] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.930911] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1402.931111] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1402.931271] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1402.932157] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e16e631f-9c60-46f1-9e5b-9400148a1987 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.940572] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9bdd893-4a38-4cde-8fde-2c7a8b744b90 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.954186] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019f7815-5376-4ae3-b5ea-ba1f457aa160 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.960673] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d196daa6-fb1b-44b7-9571-9bea3d454104 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.988657] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180971MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1402.988778] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1402.988991] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1404.021661] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.021924] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.021924] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022060] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022186] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022302] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022416] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022529] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022639] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.022748] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1404.525905] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ce3124b0-4a17-470a-bdb1-164c53a26f37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.030025] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cccd529c-d780-404d-b222-7c0213363dce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1405.532381] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 7eaa0907-ab53-4499-a0d3-723a348279b8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1406.038305] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 341cf5ae-4ae9-4dd3-a6a8-19eb95189221 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1406.541469] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1407.044456] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c2ccfca6-9703-429d-ba58-ee7b155edfce has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1407.547455] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc2e221c-f064-4aea-8316-c2d2e01d0597 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1408.050920] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d61f136b-edb3-4971-a8f5-c96dd86bd2f5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1408.555061] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.058562] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9b7a3c5c-96f1-461f-8bca-50c44315d737 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1409.561457] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c540e175-7485-4384-9c45-f8a6b0c64b7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.064715] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.568035] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1410.597673] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1411.071893] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1411.071893] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1411.071893] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1411.340987] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-726bd0b9-bc4b-4379-805c-11164c99d390 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.351757] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c56afc-2918-4d4f-8cfd-193e9d3a2ecd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.405196] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a36d58-0b1e-4682-b0ad-1c0af3ec9623 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.412991] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5d2824-39ce-4b1f-9965-23c563a9f1d9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.426937] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1411.822524] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "6085a4f8-f595-417c-9d33-22376a687be6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1411.930231] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1412.436841] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1412.436841] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.448s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1412.669637] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "891a992b-5cbb-404e-8225-3ada55327def" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.436683] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.655417] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1413.942970] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1413.943125] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1413.943239] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1414.447172] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.447352] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.447480] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.447604] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.447722] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.447839] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.447960] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.448091] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.448224] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.448322] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1414.448439] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1414.448635] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.448797] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.448955] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.449073] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1426.063739] env[69227]: WARNING oslo_vmware.rw_handles [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1426.063739] env[69227]: ERROR oslo_vmware.rw_handles [ 1426.064497] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1426.066476] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1426.066736] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Copying Virtual Disk [datastore2] vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/769b80f7-8031-42c8-9420-f0e467001072/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1426.067041] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-438ab3f0-6632-42b5-bcd9-a6fc11abb955 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.075908] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Waiting for the 
task: (returnval){ [ 1426.075908] env[69227]: value = "task-3475091" [ 1426.075908] env[69227]: _type = "Task" [ 1426.075908] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.083725] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Task: {'id': task-3475091, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.585947] env[69227]: DEBUG oslo_vmware.exceptions [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1426.586312] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1426.586953] env[69227]: ERROR nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1426.586953] env[69227]: Faults: ['InvalidArgument'] [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Traceback (most recent call last): [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] yield resources [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self.driver.spawn(context, instance, image_meta, [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self._fetch_image_if_missing(context, vi) [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: 
b6ffb3bc-196c-4ac2-b506-3fc514653c5e] image_cache(vi, tmp_image_ds_loc) [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] vm_util.copy_virtual_disk( [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] session._wait_for_task(vmdk_copy_task) [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] return self.wait_for_task(task_ref) [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] return evt.wait() [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] result = hub.switch() [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] return self.greenlet.switch() [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self.f(*self.args, **self.kw) [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] raise exceptions.translate_fault(task_info.error) [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Faults: ['InvalidArgument'] [ 1426.586953] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] [ 1426.587907] env[69227]: INFO nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Terminating instance [ 1426.589315] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 
tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1426.589572] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1426.589848] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd9469e8-c902-49c5-9e08-3a2193b18ded {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.592093] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1426.592301] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1426.593035] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da17ffb-9ae3-40b9-b94f-abc930502dfb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.600156] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1426.600393] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cc17a504-38ac-4144-a7ef-bcb5f03d00ec {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.602650] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1426.602892] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1426.603777] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b7c503f-715b-4b92-a3f1-57f02d1f3023 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.608705] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1426.608705] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52146e0b-b0b9-f98d-b5e8-3467d76ac18c" [ 1426.608705] env[69227]: _type = "Task" [ 1426.608705] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.616784] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52146e0b-b0b9-f98d-b5e8-3467d76ac18c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.670942] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1426.671176] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1426.671359] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Deleting the datastore file [datastore2] b6ffb3bc-196c-4ac2-b506-3fc514653c5e {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1426.671622] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85d2823c-5f9d-49cf-bf79-293a5cb5aa64 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.678218] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Waiting for the task: (returnval){ [ 1426.678218] env[69227]: value = "task-3475093" [ 1426.678218] env[69227]: _type = "Task" [ 1426.678218] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.686495] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Task: {'id': task-3475093, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.119509] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1427.119838] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating directory with path [datastore2] vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1427.120009] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b34e4b3-c80b-4cca-bf64-45d73879f23a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.132901] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Created directory with path [datastore2] vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1427.133106] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Fetch image to [datastore2] vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1427.133279] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1427.133985] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d87ba25-df55-4b69-847a-235343cef2af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.140570] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed1f6f6-b925-4ab6-888b-89def6f281ac {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.149264] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa871a2-521c-4345-a57f-8b3b9a502255 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.196070] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a9b88e1c-238a-413c-81f9-9397624a887f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.203067] env[69227]: DEBUG oslo_vmware.api [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Task: {'id': task-3475093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069574} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.204517] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1427.204702] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1427.204878] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1427.205058] env[69227]: INFO nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Took 0.61 seconds to destroy the instance on the hypervisor. 
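
The records above show the invoke-then-poll pattern used for the datastore cleanup: `FileManager.DeleteDatastoreFile_Task` is started, the task is polled ("progress is 0%"), and the file delete completes in ~0.07s before the instance is reported destroyed. The following is a minimal illustrative sketch of that pattern using the public oslo.vmware session API; it is not the Nova code that emitted these lines, and the host, credentials and datastore path are placeholders, not values from this log.

```python
# Sketch of the invoke-then-poll pattern behind the
# "Invoking FileManager.DeleteDatastoreFile_Task ... Waiting for the task" records.
from oslo_vmware import api


def delete_datastore_file(session, dc_ref, ds_path):
    """Start DeleteDatastoreFile_Task and block until it reaches a terminal state."""
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                              file_manager, name=ds_path, datacenter=dc_ref)
    # wait_for_task() polls the task (the "progress is 0%" lines above) and
    # raises a translated exception (e.g. VimFaultException) if the task fails.
    session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder connection values: this only works against a reachable
    # vCenter, and dc_ref must be a real Datacenter managed-object reference
    # looked up beforehand (omitted here).
    session = api.VMwareAPISession('vcenter.example.org', 'admin', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
```
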
[ 1427.206721] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b8799adb-b84f-4bff-a9fc-496ffda40220 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.208483] env[69227]: DEBUG nova.compute.claims [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1427.208653] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1427.208879] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1427.231993] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1427.282306] env[69227]: DEBUG oslo_vmware.rw_handles [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1427.341971] env[69227]: DEBUG oslo_vmware.rw_handles [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1427.342202] env[69227]: DEBUG oslo_vmware.rw_handles [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1428.024430] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35562468-b14e-42e4-a874-2fe8fe461fc3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.032521] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a98f77-fd79-4428-98b9-0a07c61188a9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.062310] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11998378-f32c-4d92-a76c-ada26edde2da {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.069032] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9423528d-d6ef-47e6-81bf-a62b3505937d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.081625] env[69227]: DEBUG nova.compute.provider_tree [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1428.584686] env[69227]: DEBUG nova.scheduler.client.report [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1429.090929] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.882s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1429.092022] env[69227]: ERROR nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1429.092022] env[69227]: Faults: ['InvalidArgument'] [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Traceback (most recent call last): [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1429.092022] env[69227]: 
ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self.driver.spawn(context, instance, image_meta, [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self._fetch_image_if_missing(context, vi) [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] image_cache(vi, tmp_image_ds_loc) [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] vm_util.copy_virtual_disk( [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] session._wait_for_task(vmdk_copy_task) [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] return self.wait_for_task(task_ref) [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] return evt.wait() [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] result = hub.switch() [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] return self.greenlet.switch() [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] self.f(*self.args, **self.kw) [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] raise exceptions.translate_fault(task_info.error) [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Faults: ['InvalidArgument'] [ 1429.092022] env[69227]: ERROR nova.compute.manager [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] [ 1429.092940] env[69227]: DEBUG nova.compute.utils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1429.093832] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Build of instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e was re-scheduled: A specified parameter was not correct: fileType [ 1429.093832] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1429.094258] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1429.094426] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1429.094600] env[69227]: DEBUG nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1429.094760] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1429.829900] env[69227]: DEBUG nova.network.neutron [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1430.332885] env[69227]: INFO nova.compute.manager [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Took 1.24 seconds to deallocate network for instance. [ 1431.365446] env[69227]: INFO nova.scheduler.client.report [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Deleted allocations for instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e [ 1431.875656] env[69227]: DEBUG oslo_concurrency.lockutils [None req-79d1549e-3fa9-45d3-b97e-e4db948dcafe tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 614.823s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.876936] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 418.828s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.877702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Acquiring lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1431.877702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1431.877702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1431.881855] env[69227]: INFO nova.compute.manager [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Terminating instance [ 1431.883611] env[69227]: DEBUG nova.compute.manager [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1431.883810] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1431.884089] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-762c7009-77d6-41cf-8c2f-d02a8de62950 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.892594] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad1e831-abb4-4719-9df0-d4c78744ebd4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.921029] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b6ffb3bc-196c-4ac2-b506-3fc514653c5e could not be found. [ 1431.921252] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1431.921432] env[69227]: INFO nova.compute.manager [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1431.921669] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1431.921887] env[69227]: DEBUG nova.compute.manager [-] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1431.921980] env[69227]: DEBUG nova.network.neutron [-] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1432.380624] env[69227]: DEBUG nova.compute.manager [None req-698f67a2-db11-43b3-9e21-45ca66e008fc tempest-ServerActionsTestOtherB-114701388 tempest-ServerActionsTestOtherB-114701388-project-member] [instance: ce3124b0-4a17-470a-bdb1-164c53a26f37] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1432.441265] env[69227]: DEBUG nova.network.neutron [-] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.886099] env[69227]: DEBUG nova.compute.manager [None req-698f67a2-db11-43b3-9e21-45ca66e008fc tempest-ServerActionsTestOtherB-114701388 tempest-ServerActionsTestOtherB-114701388-project-member] [instance: ce3124b0-4a17-470a-bdb1-164c53a26f37] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1432.943319] env[69227]: INFO nova.compute.manager [-] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] Took 1.02 seconds to deallocate network for instance. [ 1433.400100] env[69227]: DEBUG oslo_concurrency.lockutils [None req-698f67a2-db11-43b3-9e21-45ca66e008fc tempest-ServerActionsTestOtherB-114701388 tempest-ServerActionsTestOtherB-114701388-project-member] Lock "ce3124b0-4a17-470a-bdb1-164c53a26f37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 197.260s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.534749] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1433.534949] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1433.902761] env[69227]: DEBUG nova.compute.manager [None req-c8a08a0b-bac7-45ba-b464-c0ced68539e6 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: cccd529c-d780-404d-b222-7c0213363dce] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1433.973223] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2255bc14-2f58-4541-bc79-3f814d3669db tempest-SecurityGroupsTestJSON-2139240552 tempest-SecurityGroupsTestJSON-2139240552-project-member] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.096s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1433.977207] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 261.917s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1433.977207] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: b6ffb3bc-196c-4ac2-b506-3fc514653c5e] During sync_power_state the instance has a pending task (deleting). Skip. [ 1433.977207] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "b6ffb3bc-196c-4ac2-b506-3fc514653c5e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.003s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1434.407791] env[69227]: DEBUG nova.compute.manager [None req-c8a08a0b-bac7-45ba-b464-c0ced68539e6 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: cccd529c-d780-404d-b222-7c0213363dce] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1434.922183] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c8a08a0b-bac7-45ba-b464-c0ced68539e6 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "cccd529c-d780-404d-b222-7c0213363dce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.685s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1435.316809] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "1397d96c-8a1d-4940-9b58-148435f12497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1435.317036] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "1397d96c-8a1d-4940-9b58-148435f12497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1435.424473] env[69227]: DEBUG nova.compute.manager [None req-853b5102-efcd-4317-b983-ac7c466cd33b tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 7eaa0907-ab53-4499-a0d3-723a348279b8] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1435.928852] env[69227]: DEBUG nova.compute.manager [None req-853b5102-efcd-4317-b983-ac7c466cd33b tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 7eaa0907-ab53-4499-a0d3-723a348279b8] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1436.446027] env[69227]: DEBUG oslo_concurrency.lockutils [None req-853b5102-efcd-4317-b983-ac7c466cd33b tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "7eaa0907-ab53-4499-a0d3-723a348279b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.209s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1436.948717] env[69227]: DEBUG nova.compute.manager [None req-e7eb5cb2-d0fa-4fcf-b29a-0dc5cee67310 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 341cf5ae-4ae9-4dd3-a6a8-19eb95189221] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1437.324949] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "dcabb6a4-2b08-47df-8687-18431ee85153" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1437.325200] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "dcabb6a4-2b08-47df-8687-18431ee85153" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1437.452573] env[69227]: DEBUG nova.compute.manager [None req-e7eb5cb2-d0fa-4fcf-b29a-0dc5cee67310 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 341cf5ae-4ae9-4dd3-a6a8-19eb95189221] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1437.967364] env[69227]: DEBUG oslo_concurrency.lockutils [None req-e7eb5cb2-d0fa-4fcf-b29a-0dc5cee67310 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "341cf5ae-4ae9-4dd3-a6a8-19eb95189221" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 196.381s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1438.470406] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1438.992312] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.992592] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.994069] env[69227]: INFO nova.compute.claims [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1440.226171] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c7edf9-96db-4211-83a5-2f7f86bd1d3d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.233461] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad771a53-7854-46be-9994-779feb7246f1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.262384] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f1581c-d5ee-49f9-9237-e950b4a10cc6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.269988] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d209b7-b5d1-43e4-93b4-d46c36ac9295 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.284132] env[69227]: DEBUG nova.compute.provider_tree [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1440.789402] env[69227]: DEBUG nova.scheduler.client.report [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1441.294348] 
env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1441.294989] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1441.800463] env[69227]: DEBUG nova.compute.utils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1441.801803] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1441.801979] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1441.838417] env[69227]: DEBUG nova.policy [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2a4643d11b854cecbdd637d6d736caed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5c873bc62f4a43a0b4e6e1dafb2c220a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1442.170103] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Successfully created port: b397d360-d163-4f3e-bed2-20bd11485b8d {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1442.305530] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1443.317198] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1443.341598] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1443.341855] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1443.342009] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1443.342202] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1443.342348] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1443.342514] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1443.342751] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1443.342913] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1443.343092] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1443.343257] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1443.343425] env[69227]: DEBUG nova.virt.hardware [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1443.344323] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee63b22-486f-4038-a5e7-e4319685c54f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.352202] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c965706d-8214-44ee-ba35-f53c706651ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.612685] env[69227]: DEBUG nova.compute.manager [req-ad001435-c139-414d-8412-112e0bf26e57 req-f8335792-faef-4123-8b03-c8f5028b0633 service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Received event network-vif-plugged-b397d360-d163-4f3e-bed2-20bd11485b8d {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1443.612897] env[69227]: DEBUG oslo_concurrency.lockutils [req-ad001435-c139-414d-8412-112e0bf26e57 req-f8335792-faef-4123-8b03-c8f5028b0633 service nova] Acquiring lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1443.613140] env[69227]: DEBUG oslo_concurrency.lockutils [req-ad001435-c139-414d-8412-112e0bf26e57 req-f8335792-faef-4123-8b03-c8f5028b0633 service nova] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1443.613292] env[69227]: DEBUG oslo_concurrency.lockutils [req-ad001435-c139-414d-8412-112e0bf26e57 req-f8335792-faef-4123-8b03-c8f5028b0633 service nova] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1443.613424] env[69227]: DEBUG nova.compute.manager [req-ad001435-c139-414d-8412-112e0bf26e57 req-f8335792-faef-4123-8b03-c8f5028b0633 service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] No waiting events found dispatching network-vif-plugged-b397d360-d163-4f3e-bed2-20bd11485b8d {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1443.613634] env[69227]: WARNING nova.compute.manager [req-ad001435-c139-414d-8412-112e0bf26e57 req-f8335792-faef-4123-8b03-c8f5028b0633 service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Received unexpected event network-vif-plugged-b397d360-d163-4f3e-bed2-20bd11485b8d for instance with vm_state building and task_state spawning. [ 1443.864594] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Successfully updated port: b397d360-d163-4f3e-bed2-20bd11485b8d {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.368761] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "refresh_cache-cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.368761] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquired lock "refresh_cache-cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1444.368761] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1444.897637] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1445.011860] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Updating instance_info_cache with network_info: [{"id": "b397d360-d163-4f3e-bed2-20bd11485b8d", "address": "fa:16:3e:de:ed:86", "network": {"id": "b1192090-afb0-45e8-a966-1964a15f5569", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1456801010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c873bc62f4a43a0b4e6e1dafb2c220a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb397d360-d1", "ovs_interfaceid": "b397d360-d163-4f3e-bed2-20bd11485b8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.514739] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Releasing lock "refresh_cache-cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1445.515077] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance network_info: |[{"id": "b397d360-d163-4f3e-bed2-20bd11485b8d", "address": "fa:16:3e:de:ed:86", "network": {"id": "b1192090-afb0-45e8-a966-1964a15f5569", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1456801010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c873bc62f4a43a0b4e6e1dafb2c220a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb397d360-d1", "ovs_interfaceid": "b397d360-d163-4f3e-bed2-20bd11485b8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1445.515571] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:ed:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f65996a3-f865-4492-9377-cd14ec8b3aae', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b397d360-d163-4f3e-bed2-20bd11485b8d', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.523009] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Creating folder: Project (5c873bc62f4a43a0b4e6e1dafb2c220a). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1445.523273] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-497ac7c6-a343-4200-82bc-8c512f8deb4c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.535425] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Created folder: Project (5c873bc62f4a43a0b4e6e1dafb2c220a) in parent group-v694623. [ 1445.535599] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Creating folder: Instances. Parent ref: group-v694697. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1445.535818] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d4b126cc-1fec-41d9-917f-8be1f36e46c0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.543997] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Created folder: Instances in parent group-v694697. [ 1445.544226] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1445.544431] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1445.544616] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7663cad3-d33c-4111-ba9d-ec133f00ae4c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.562986] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.562986] env[69227]: value = "task-3475096" [ 1445.562986] env[69227]: _type = "Task" [ 1445.562986] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.569880] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475096, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.642919] env[69227]: DEBUG nova.compute.manager [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Received event network-changed-b397d360-d163-4f3e-bed2-20bd11485b8d {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1445.643323] env[69227]: DEBUG nova.compute.manager [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Refreshing instance network info cache due to event network-changed-b397d360-d163-4f3e-bed2-20bd11485b8d. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1445.643569] env[69227]: DEBUG oslo_concurrency.lockutils [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] Acquiring lock "refresh_cache-cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.643714] env[69227]: DEBUG oslo_concurrency.lockutils [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] Acquired lock "refresh_cache-cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1445.643874] env[69227]: DEBUG nova.network.neutron [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Refreshing network info cache for port b397d360-d163-4f3e-bed2-20bd11485b8d {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1446.073576] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475096, 'name': CreateVM_Task, 'duration_secs': 0.291642} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.073776] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1446.074496] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.074680] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1446.074993] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1446.075260] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6453768f-1583-4608-bffa-9f8683692c00 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.079553] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Waiting for the task: (returnval){ [ 1446.079553] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5275816c-22ea-dc73-8be9-d599e688ad5f" [ 1446.079553] env[69227]: _type = "Task" [ 1446.079553] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.092699] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5275816c-22ea-dc73-8be9-d599e688ad5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.346848] env[69227]: DEBUG nova.network.neutron [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Updated VIF entry in instance network info cache for port b397d360-d163-4f3e-bed2-20bd11485b8d. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1446.347223] env[69227]: DEBUG nova.network.neutron [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Updating instance_info_cache with network_info: [{"id": "b397d360-d163-4f3e-bed2-20bd11485b8d", "address": "fa:16:3e:de:ed:86", "network": {"id": "b1192090-afb0-45e8-a966-1964a15f5569", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1456801010-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5c873bc62f4a43a0b4e6e1dafb2c220a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f65996a3-f865-4492-9377-cd14ec8b3aae", "external-id": "nsx-vlan-transportzone-31", "segmentation_id": 31, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb397d360-d1", "ovs_interfaceid": "b397d360-d163-4f3e-bed2-20bd11485b8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1446.589314] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1446.589585] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1446.589839] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1446.850087] env[69227]: DEBUG oslo_concurrency.lockutils [req-133d808f-56b7-41c6-ad45-85b911c13147 req-e0d3f7fd-e531-47fc-a974-3a37ed4b64fa service nova] Releasing lock "refresh_cache-cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1456.427370] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1458.684523] env[69227]: 
DEBUG oslo_concurrency.lockutils [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1460.427565] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1461.027684] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "40a459c7-657d-40db-aa78-d16af085a3ee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.027912] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "40a459c7-657d-40db-aa78-d16af085a3ee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.427072] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1463.929399] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.422601] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.426263] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1464.929637] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1464.929932] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1464.929976] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1464.930617] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1464.930988] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2632690f-0d6f-4ba5-ba92-0b1638a45d5a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.939632] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf6e67a-20fd-4ab2-9379-4422ef34594d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.954224] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e3dd70-92f0-4540-9f60-e4d494df73da {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.960579] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33d46eb-289e-470a-86a9-5aaf1cf6c3bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.991442] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180966MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1464.991597] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1464.991822] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1466.026519] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.026782] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.026782] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.026944] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.027232] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.027379] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.027505] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.027624] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.027741] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.027877] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1466.531186] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1467.034190] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9b7a3c5c-96f1-461f-8bca-50c44315d737 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1467.537592] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c540e175-7485-4384-9c45-f8a6b0c64b7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1468.041060] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1468.543235] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.046070] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1469.549751] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.053016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1470.555746] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1471.058832] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 40a459c7-657d-40db-aa78-d16af085a3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1471.059166] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1471.059250] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1471.138468] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing inventories for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1471.151669] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating ProviderTree inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1471.151860] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1471.161281] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing aggregate associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, aggregates: None {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1471.177575] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing trait associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1471.376821] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6463a9-4464-49a8-8ba4-bab01fd2d73e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.384099] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-287dc8f8-12d5-410e-a1d3-306c29c3230e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.413598] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a7f066-3616-4587-9714-04ac252c585f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.420374] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23071305-725d-44f8-a261-7ca0c3916421 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.433382] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1471.936886] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1472.443014] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1472.443014] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.451s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1472.443014] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.443014] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11243}} [ 1472.946339] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] There are 0 instances to clean {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11252}} [ 1472.946582] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1472.946723] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances with incomplete migration 
{{(pid=69227) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11281}} [ 1474.395289] env[69227]: WARNING oslo_vmware.rw_handles [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1474.395289] env[69227]: ERROR oslo_vmware.rw_handles [ 1474.395973] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1474.397998] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1474.398328] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Copying Virtual Disk [datastore2] vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/ee49d12b-6cd3-4810-bae5-35f493ee7b09/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1474.398634] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f011a5f-1f90-4acf-89f2-81a09ccf1d31 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.408467] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] 
Waiting for the task: (returnval){ [ 1474.408467] env[69227]: value = "task-3475097" [ 1474.408467] env[69227]: _type = "Task" [ 1474.408467] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.416212] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': task-3475097, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.450855] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.451043] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1474.451166] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1474.917851] env[69227]: DEBUG oslo_vmware.exceptions [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1474.918152] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1474.918711] env[69227]: ERROR nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1474.918711] env[69227]: Faults: ['InvalidArgument'] [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Traceback (most recent call last): [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] yield resources [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self.driver.spawn(context, instance, image_meta, [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self._fetch_image_if_missing(context, vi) [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] image_cache(vi, tmp_image_ds_loc) [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] vm_util.copy_virtual_disk( [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] session._wait_for_task(vmdk_copy_task) [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] return self.wait_for_task(task_ref) [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] return evt.wait() [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] result = hub.switch() [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] return self.greenlet.switch() [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self.f(*self.args, **self.kw) [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] raise exceptions.translate_fault(task_info.error) [ 
1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Faults: ['InvalidArgument'] [ 1474.918711] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] [ 1474.919778] env[69227]: INFO nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Terminating instance [ 1474.921022] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1474.921022] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.921022] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c850791-9ee6-419c-8c02-9db0550f4cfd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.923045] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1474.923241] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1474.923955] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6dcf8b-80be-40fc-8fa4-cc6ca882fd37 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.930785] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1474.930990] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04721f8a-fcf9-4725-85fb-41ea25650c18 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.933045] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.933222] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1474.934168] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a56b03d7-cd3a-4dae-a04e-f9d7e5e14a39 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.938661] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 1474.938661] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5293c4e4-a69d-617c-cc7d-34c1b49795bc" [ 1474.938661] env[69227]: _type = "Task" [ 1474.938661] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.945828] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5293c4e4-a69d-617c-cc7d-34c1b49795bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.956063] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.956590] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.956670] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.956741] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.956927] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.956986] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.957114] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.957229] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.957343] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.957454] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1474.957569] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1474.957739] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.957905] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.958061] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1474.958218] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1475.001656] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1475.001878] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1475.002073] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Deleting the datastore file [datastore2] 9c61d411-b6dd-43c9-a59a-8ff3030e6149 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1475.002332] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c70c22bf-8b40-4ea2-88b2-e9f580d47339 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.008911] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1475.008911] env[69227]: value = "task-3475099" [ 1475.008911] env[69227]: _type = "Task" [ 1475.008911] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.016450] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': task-3475099, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.449423] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1475.449423] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating directory with path [datastore2] vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1475.449829] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-532679b8-63d0-4bde-96e5-1342dce63671 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.461152] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Created directory with path [datastore2] vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1475.461354] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Fetch image to [datastore2] vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1475.461529] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1475.462262] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44abb96-f23f-41c0-b70d-15998e7dfb61 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.468649] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cc4d764-3d75-4fc7-9aaf-df7e9153ba36 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.477680] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc65d58-7ae8-4d6c-8ece-70d6512db1ec {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.509036] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-1d9171a9-7740-4a98-b5e1-9aaa00a6d9b3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.519797] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8cb92c4c-c805-4c17-a0a7-599409cb2319 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.521372] env[69227]: DEBUG oslo_vmware.api [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': task-3475099, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097057} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.521603] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1475.521783] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1475.521952] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1475.522137] env[69227]: INFO nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1475.524090] env[69227]: DEBUG nova.compute.claims [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1475.524291] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1475.524528] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1475.546209] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1475.596168] env[69227]: DEBUG oslo_vmware.rw_handles [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1475.656216] env[69227]: DEBUG oslo_vmware.rw_handles [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1475.656407] env[69227]: DEBUG oslo_vmware.rw_handles [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1476.243652] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8ea094-0277-45ad-8d53-6b52f7bb5ec6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.251687] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d000b8-33a9-4420-91a1-63f073856749 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.281021] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be8b453-904a-4bc6-ae52-e2949003c5aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.288242] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b97e2b-dc9e-457c-a505-329373c2d65c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.302228] env[69227]: DEBUG nova.compute.provider_tree [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1476.805823] env[69227]: DEBUG nova.scheduler.client.report [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1477.312065] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.786s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1477.312065] env[69227]: ERROR nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1477.312065] env[69227]: Faults: ['InvalidArgument'] [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Traceback (most recent call last): [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1477.312065] 
env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self.driver.spawn(context, instance, image_meta, [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self._fetch_image_if_missing(context, vi) [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] image_cache(vi, tmp_image_ds_loc) [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] vm_util.copy_virtual_disk( [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] session._wait_for_task(vmdk_copy_task) [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] return self.wait_for_task(task_ref) [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] return evt.wait() [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] result = hub.switch() [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] return self.greenlet.switch() [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] self.f(*self.args, **self.kw) [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] raise exceptions.translate_fault(task_info.error) [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Faults: ['InvalidArgument'] [ 1477.312065] env[69227]: ERROR nova.compute.manager [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] [ 1477.313179] env[69227]: DEBUG nova.compute.utils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1477.314081] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Build of instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 was re-scheduled: A specified parameter was not correct: fileType [ 1477.314081] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1477.314456] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1477.314629] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1477.314822] env[69227]: DEBUG nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1477.315012] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1478.050928] env[69227]: DEBUG nova.network.neutron [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.555044] env[69227]: INFO nova.compute.manager [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Took 1.24 seconds to deallocate network for instance. [ 1479.586907] env[69227]: INFO nova.scheduler.client.report [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Deleted allocations for instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 [ 1480.098367] env[69227]: DEBUG oslo_concurrency.lockutils [None req-be7e8126-292e-4d4b-ac7a-0600485ec4f4 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 656.567s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1480.099694] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 460.005s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.099941] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1480.100173] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1480.100339] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1480.102716] env[69227]: INFO nova.compute.manager [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Terminating instance [ 1480.104468] env[69227]: DEBUG nova.compute.manager [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1480.104732] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1480.105059] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36a576b4-9846-46f7-bedc-7d8f16364e27 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.114933] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5573ffb5-e4e0-49ae-9a06-76af44a0128e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.147336] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9c61d411-b6dd-43c9-a59a-8ff3030e6149 could not be found. [ 1480.147544] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1480.147723] env[69227]: INFO nova.compute.manager [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1480.147968] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1480.148215] env[69227]: DEBUG nova.compute.manager [-] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1480.148309] env[69227]: DEBUG nova.network.neutron [-] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1480.603302] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: c2ccfca6-9703-429d-ba58-ee7b155edfce] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1480.667111] env[69227]: DEBUG nova.network.neutron [-] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.108602] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: c2ccfca6-9703-429d-ba58-ee7b155edfce] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1481.169657] env[69227]: INFO nova.compute.manager [-] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] Took 1.02 seconds to deallocate network for instance. [ 1481.623240] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "c2ccfca6-9703-429d-ba58-ee7b155edfce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.260s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1482.126066] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc2e221c-f064-4aea-8316-c2d2e01d0597] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1482.192072] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fe9887c3-1f86-4adc-9be9-c745e49b1ca2 tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.092s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1482.193380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 310.137s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1482.193629] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9c61d411-b6dd-43c9-a59a-8ff3030e6149] During sync_power_state the instance has a pending task (deleting). Skip. [ 1482.193755] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "9c61d411-b6dd-43c9-a59a-8ff3030e6149" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1482.630475] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc2e221c-f064-4aea-8316-c2d2e01d0597] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1483.143649] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc2e221c-f064-4aea-8316-c2d2e01d0597" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.755s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1483.646565] env[69227]: DEBUG nova.compute.manager [None req-0cce11cb-6327-444b-9d3c-5483ecc1cb4e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: d61f136b-edb3-4971-a8f5-c96dd86bd2f5] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1484.151419] env[69227]: DEBUG nova.compute.manager [None req-0cce11cb-6327-444b-9d3c-5483ecc1cb4e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: d61f136b-edb3-4971-a8f5-c96dd86bd2f5] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1484.663988] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0cce11cb-6327-444b-9d3c-5483ecc1cb4e tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "d61f136b-edb3-4971-a8f5-c96dd86bd2f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.778s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1485.166772] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1485.689455] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.689732] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1485.691175] env[69227]: INFO nova.compute.claims [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.885593] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742c93cf-ab4d-4e5c-a421-2913db08ef4c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.893344] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff0bf2a0-579e-41f1-b650-5333a6e26567 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.922917] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca630b8-2e16-4081-9c3d-6b3eeb4b4214 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.930024] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d873027b-b6e3-478a-b958-76e9da903b95 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.942758] env[69227]: DEBUG nova.compute.provider_tree [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.446157] env[69227]: 
DEBUG nova.scheduler.client.report [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1487.950832] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.261s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.951391] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1488.456687] env[69227]: DEBUG nova.compute.utils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1488.458082] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Not allocating networking since 'none' was specified. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1488.961062] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1489.969862] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1489.995357] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1489.995639] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1489.995840] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1489.996039] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1489.996191] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1489.996336] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1489.996539] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1489.996695] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1489.996856] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf 
tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1489.997028] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1489.997230] env[69227]: DEBUG nova.virt.hardware [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1489.998083] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df11bed0-4f8a-4ee9-9690-5ab71667f7f5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.006031] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e31d870-139b-43f1-bdf4-ddef0571d1a5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.018788] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance VIF info [] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1490.024094] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Creating folder: Project (7b16be0e639f4ba5bb92123529f081fb). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1490.024333] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe2f95a6-5b8d-43bc-8717-b370bad4e01e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.033725] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Created folder: Project (7b16be0e639f4ba5bb92123529f081fb) in parent group-v694623. [ 1490.033898] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Creating folder: Instances. Parent ref: group-v694700. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1490.034109] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4878124f-07f4-4cea-ab9b-e50130c18aa8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.042999] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Created folder: Instances in parent group-v694700. 
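Editor's note: the instance_claim above succeeds against the inventory that the report client logs for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b. For quick reference, the sketch below reproduces that inventory dict exactly as logged and computes the effective schedulable capacity per resource class, assuming the usual Placement-style formula (total - reserved) * allocation_ratio; the helper is illustrative and is not code from Nova or Placement.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 93,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Schedulable capacity per resource class, assuming (total - reserved) * allocation_ratio."""
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}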
[ 1490.043236] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1490.043411] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1490.043589] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-720fb259-89ab-4ac4-9928-d3f4d87f268a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.059142] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1490.059142] env[69227]: value = "task-3475102" [ 1490.059142] env[69227]: _type = "Task" [ 1490.059142] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.065805] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475102, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.569381] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475102, 'name': CreateVM_Task, 'duration_secs': 0.263926} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.569381] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1490.569690] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.569852] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1490.570202] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1490.570449] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60348bda-eb28-48e8-a40a-2eaf2b19e12e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.574616] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 
tempest-ServerShowV254Test-1862314207-project-member] Waiting for the task: (returnval){ [ 1490.574616] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52357681-7b4e-a623-e20d-f7df102094b5" [ 1490.574616] env[69227]: _type = "Task" [ 1490.574616] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.581856] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52357681-7b4e-a623-e20d-f7df102094b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.085368] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1491.085743] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1491.085743] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.371364] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1518.427978] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1521.094461] env[69227]: WARNING oslo_vmware.rw_handles [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1521.094461] env[69227]: ERROR oslo_vmware.rw_handles [ 1521.095124] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1521.097059] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1521.097314] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Copying Virtual Disk [datastore2] vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/86d199c3-7fca-4a9f-b820-e8417d1afbd3/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1521.097604] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f56fcfb6-f729-4e38-8a3b-a275dbbf2734 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.107218] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 1521.107218] env[69227]: value = "task-3475103" [ 1521.107218] env[69227]: _type = "Task" [ 1521.107218] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.115149] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': task-3475103, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.617531] env[69227]: DEBUG oslo_vmware.exceptions [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1521.617818] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1521.618414] env[69227]: ERROR nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1521.618414] env[69227]: Faults: ['InvalidArgument'] [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] yield resources [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.driver.spawn(context, instance, image_meta, [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._fetch_image_if_missing(context, vi) [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] image_cache(vi, tmp_image_ds_loc) [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] vm_util.copy_virtual_disk( [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] session._wait_for_task(vmdk_copy_task) [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.wait_for_task(task_ref) [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return evt.wait() [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] result = hub.switch() [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.greenlet.switch() [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.f(*self.args, **self.kw) [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise exceptions.translate_fault(task_info.error) [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Faults: ['InvalidArgument'] [ 1521.618414] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1521.619404] env[69227]: INFO nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Terminating instance [ 1521.620226] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1521.620431] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 
tempest-ListImageFiltersTestJSON-1281810572-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1521.620673] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3436f44-1f1f-4267-88ac-453193e180f9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.622809] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1521.623009] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1521.623771] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428d5c32-3916-4e3c-88c5-3abde36d6926 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.630763] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1521.631070] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b2d7169-396f-419f-9a0f-1b9000ec1377 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.633292] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1521.633463] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1521.634446] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01cd2131-431c-4ec6-99a5-de40470493ac {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.638989] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Waiting for the task: (returnval){ [ 1521.638989] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]528fe56d-4df5-6d9d-877e-933d62226115" [ 1521.638989] env[69227]: _type = "Task" [ 1521.638989] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.646706] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]528fe56d-4df5-6d9d-877e-933d62226115, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.703882] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1521.704126] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1521.704292] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Deleting the datastore file [datastore2] 43397ae2-14e8-495d-bdd9-54a14e6427e9 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1521.704678] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-43a71adf-b159-4944-8b2f-eb42d2875417 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.712654] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for the task: (returnval){ [ 1521.712654] env[69227]: value = "task-3475105" [ 1521.712654] env[69227]: _type = "Task" [ 1521.712654] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.720472] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': task-3475105, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.149841] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1522.150162] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Creating directory with path [datastore2] vmware_temp/243b54c9-51bd-4f9d-9234-f8501e365f10/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.150392] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dac54242-51bf-4e2e-b8b5-b3fdd444fb53 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.161892] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Created directory with path [datastore2] vmware_temp/243b54c9-51bd-4f9d-9234-f8501e365f10/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.162103] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Fetch image to [datastore2] vmware_temp/243b54c9-51bd-4f9d-9234-f8501e365f10/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1522.162280] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/243b54c9-51bd-4f9d-9234-f8501e365f10/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1522.163072] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba69df30-ad6b-4f50-b6bf-741eceb50c37 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.169397] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec4da7f-751d-4179-b61c-a88c051f8f21 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.178104] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4076a0da-44e4-4929-ab50-00ad4637d4f5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.207701] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4d3b1750-6920-4798-adfb-5f044adc56d6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.212810] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9ff800ce-7f5c-4db7-bd90-930d96e2064e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.222483] env[69227]: DEBUG oslo_vmware.api [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Task: {'id': task-3475105, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067923} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.222700] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1522.222908] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1522.223109] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1522.223302] env[69227]: INFO nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1522.225291] env[69227]: DEBUG nova.compute.claims [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1522.225464] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1522.225701] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1522.241196] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1522.396721] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1522.398360] env[69227]: ERROR nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = getattr(controller, method)(*args, **kwargs) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._get(image_id) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] resp, body = self.http_client.get(url, headers=header) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.request(url, 'GET', **kwargs) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._handle_response(resp) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exc.from_response(resp, resp.content) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] yield resources [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self.driver.spawn(context, instance, image_meta, [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._fetch_image_if_missing(context, vi) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image_fetch(context, vi, tmp_image_ds_loc) [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] images.fetch_image( [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1522.398360] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] metadata = IMAGE_API.get(context, image_ref) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return session.show(context, image_id, [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] _reraise_translated_image_exception(image_id) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise new_exc.with_traceback(exc_trace) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = getattr(controller, method)(*args, **kwargs) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._get(image_id) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] resp, body = self.http_client.get(url, headers=header) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.request(url, 'GET', **kwargs) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._handle_response(resp) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exc.from_response(resp, resp.content) [ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1522.399604] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1522.399604] env[69227]: INFO nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Terminating instance [ 1522.400410] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1522.400627] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.401253] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1522.401443] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1522.401668] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2bffb048-10f2-4102-86d6-8469e16fb057 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.404114] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9853c902-2f01-4a40-b52e-efbd0163154e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.411479] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1522.411703] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2bbbd22-54fd-4753-bd00-5bf4219440ca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.413793] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.413964] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 
tempest-ServersAdminTestJSON-1566599615-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1522.414936] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e671c480-3013-4cb1-9e1d-86abd2115620 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.419343] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Waiting for the task: (returnval){ [ 1522.419343] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d2dffe-ab7f-ab03-912a-617daaa7820c" [ 1522.419343] env[69227]: _type = "Task" [ 1522.419343] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.426317] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1522.426541] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d2dffe-ab7f-ab03-912a-617daaa7820c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.479922] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1522.480216] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1522.480437] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Deleting the datastore file [datastore2] 12393e1f-9cb4-4d54-b485-ddc70c65ac47 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1522.480727] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86406b25-b6ee-4ac6-878a-417d04ff9563 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.486874] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Waiting for the task: (returnval){ [ 1522.486874] env[69227]: value = "task-3475107" [ 1522.486874] env[69227]: _type = "Task" [ 1522.486874] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.494270] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Task: {'id': task-3475107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.918036] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb31772f-f3de-4f3e-a214-fabf4c0ece37 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.931072] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8a9c51-5ed7-4ac2-9ff0-a3d793a802f4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.934252] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1522.934514] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Creating directory with path [datastore2] vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.934990] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0ab3835-b180-4fa9-a6bf-507d3b33f7b7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.962205] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a60b7d6-c66e-401f-9ed7-e6488d57b4ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.964593] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Created directory with path [datastore2] vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.964786] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Fetch image to [datastore2] vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1522.964956] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] 
vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1522.965674] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa669e4-7de2-4221-a862-83cc10684755 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.975290] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73d297b-e8d6-4068-9d27-6d7b4be8f26b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.979148] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f17154-1bba-4d4d-b70d-aba4ff5ea583 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.990589] env[69227]: DEBUG nova.compute.provider_tree [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1522.999143] env[69227]: DEBUG nova.scheduler.client.report [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1523.002841] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90147e6a-8088-426d-b870-257cfccf5ed3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.011294] env[69227]: DEBUG oslo_vmware.api [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Task: {'id': task-3475107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071105} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.035013] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1523.035232] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1523.035429] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1523.035607] env[69227]: INFO nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Took 0.63 seconds to destroy the instance on the hypervisor. [ 1523.037326] env[69227]: DEBUG nova.compute.claims [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1523.037499] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1523.038377] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b78ccdf-f422-40b1-adfe-4bdae6606dba {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.043750] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-275c080c-8454-4cb2-8ebb-93bb72edc527 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.064312] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1523.114147] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = 
https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1523.178269] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1523.178625] env[69227]: DEBUG oslo_vmware.rw_handles [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1523.427022] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1523.508263] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.282s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1523.508830] env[69227]: ERROR nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.508830] env[69227]: Faults: ['InvalidArgument'] [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.driver.spawn(context, instance, image_meta, [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._fetch_image_if_missing(context, vi) [ 1523.508830] env[69227]: ERROR 
nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] image_cache(vi, tmp_image_ds_loc) [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] vm_util.copy_virtual_disk( [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] session._wait_for_task(vmdk_copy_task) [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.wait_for_task(task_ref) [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return evt.wait() [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] result = hub.switch() [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.greenlet.switch() [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.f(*self.args, **self.kw) [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise exceptions.translate_fault(task_info.error) [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Faults: ['InvalidArgument'] [ 1523.508830] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.509744] env[69227]: DEBUG nova.compute.utils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 
tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1523.510658] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.473s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1523.513380] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Build of instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 was re-scheduled: A specified parameter was not correct: fileType [ 1523.513380] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1523.513791] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1523.513963] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1523.514133] env[69227]: DEBUG nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1523.514298] env[69227]: DEBUG nova.network.neutron [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1523.637102] env[69227]: DEBUG neutronclient.v2_0.client [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1523.639077] env[69227]: ERROR nova.compute.manager [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.driver.spawn(context, instance, image_meta, [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._fetch_image_if_missing(context, vi) [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] image_cache(vi, tmp_image_ds_loc) [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] vm_util.copy_virtual_disk( [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] session._wait_for_task(vmdk_copy_task) [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.wait_for_task(task_ref) [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return evt.wait() [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] result = hub.switch() [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.greenlet.switch() [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.f(*self.args, **self.kw) [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise exceptions.translate_fault(task_info.error) [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Faults: ['InvalidArgument'] [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] During handling of the above exception, another exception occurred: [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._build_and_run_instance(context, instance, image, [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise exception.RescheduledException( [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] nova.exception.RescheduledException: Build of instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 was re-scheduled: A specified parameter was not correct: fileType [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Faults: ['InvalidArgument'] [ 1523.639077] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] During handling of the above exception, another exception occurred: [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 
1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] exception_handler_v20(status_code, error_body) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise client_exc(message=error_message, [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Neutron server returns request_ids: ['req-ca903dc8-3afc-4010-8b81-ad553e812455'] [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] During handling of the above exception, another exception occurred: [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _cleanup_allocated_networks [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._deallocate_network(context, instance, requested_networks) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.network_api.deallocate_for_instance( [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] data = neutron.list_ports(**search_opts) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.list('ports', self.ports_path, retrieve_all, [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 
43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] for r in self._pagination(collection, path, **params): [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] res = self.get(path, params=params) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.retry_request("GET", action, body=body, [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1523.640079] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.do_request(method, action, body=body, [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._handle_fault_response(status_code, replybody, resp) [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise exception.Unauthorized() [ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] nova.exception.Unauthorized: Not authorized. 
[ 1523.641349] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1524.164828] env[69227]: INFO nova.scheduler.client.report [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Deleted allocations for instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 [ 1524.212498] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905bf3b7-c9ad-4f6e-aae2-f3f03b8044c0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.220169] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9076b54d-8c76-4819-9dc6-0c794a9609b0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.250856] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a9b456-2ac3-4523-88c6-8ad14c3b0bcb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.258137] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9b9256-7a86-4be1-9441-f5603c3de11a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.271306] env[69227]: DEBUG nova.compute.provider_tree [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1524.426614] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1524.673988] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a898c745-2759-40b7-a9dd-e1a22f87b1de tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.076s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1524.675444] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.275s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1524.675575] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Acquiring lock "43397ae2-14e8-495d-bdd9-54a14e6427e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1524.675821] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1524.675933] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1524.677867] env[69227]: INFO nova.compute.manager [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Terminating instance [ 1524.679394] env[69227]: DEBUG nova.compute.manager [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1524.679584] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1524.679832] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68e21ff8-69c7-445d-a45b-24e00e2cfaa5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.688567] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64cd359-cdd4-4bcb-b46b-0ae534922614 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.717600] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 43397ae2-14e8-495d-bdd9-54a14e6427e9 could not be found. 
[ 1524.717600] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1524.717600] env[69227]: INFO nova.compute.manager [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1524.717771] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1524.717862] env[69227]: DEBUG nova.compute.manager [-] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1524.717968] env[69227]: DEBUG nova.network.neutron [-] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1524.774120] env[69227]: DEBUG nova.scheduler.client.report [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1524.806591] env[69227]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1524.806591] env[69227]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [-] Dynamic interval looping call 'oslo_service.backend.eventlet.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall exception_handler_v20(status_code, error_body) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise client_exc(message=error_message, [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Neutron server returns request_ids: ['req-0ba0467b-8732-4003-ba39-a76bd4a5acef'] [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall During handling of the above exception, another exception occurred: [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = func(*self.args, **self.kw) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = f(*args, **kwargs) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._deallocate_network( [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self.network_api.deallocate_for_instance( [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall data = neutron.list_ports(**search_opts) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall for r in self._pagination(collection, path, **params): [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall res = self.get(path, params=params) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.retry_request("GET", action, body=body, [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1524.806591] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.do_request(method, action, body=body, [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1524.807783] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1524.807783] env[69227]: ERROR nova.compute.manager [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1524.929927] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1525.178508] env[69227]: DEBUG nova.compute.manager [None req-45a8454b-3efd-4641-9fe6-385042711ded tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] [instance: 9b7a3c5c-96f1-461f-8bca-50c44315d737] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1525.279331] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.768s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1525.280096] env[69227]: ERROR nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = getattr(controller, method)(*args, **kwargs) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._get(image_id) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] resp, body = self.http_client.get(url, headers=header) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.request(url, 'GET', **kwargs) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._handle_response(resp) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exc.from_response(resp, resp.content) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self.driver.spawn(context, instance, image_meta, [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._fetch_image_if_missing(context, vi) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image_fetch(context, vi, tmp_image_ds_loc) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] images.fetch_image( [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] metadata = IMAGE_API.get(context, image_ref) [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1525.280096] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return session.show(context, image_id, [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] _reraise_translated_image_exception(image_id) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise new_exc.with_traceback(exc_trace) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 
12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = getattr(controller, method)(*args, **kwargs) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._get(image_id) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] resp, body = self.http_client.get(url, headers=header) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.request(url, 'GET', **kwargs) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._handle_response(resp) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exc.from_response(resp, resp.content) [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. [ 1525.281060] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.281060] env[69227]: DEBUG nova.compute.utils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
{{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1525.281849] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.352s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1525.282045] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1525.282205] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1525.282675] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Build of instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 was re-scheduled: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1525.283170] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1525.283338] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1525.283492] env[69227]: DEBUG nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1525.283650] env[69227]: DEBUG nova.network.neutron [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1525.285821] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78390935-0237-4e21-a96e-7a38a52e554e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.294102] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c68a9b-a054-4484-ac38-c635e0a5157b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.307772] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc487f9-246c-4083-a898-a6862415329a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.312450] env[69227]: ERROR nova.compute.manager [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] exception_handler_v20(status_code, error_body) [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise client_exc(message=error_message, [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Neutron server returns request_ids: ['req-0ba0467b-8732-4003-ba39-a76bd4a5acef'] [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] During handling of the above exception, another exception occurred: [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Traceback (most recent call last): [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._delete_instance(context, instance, bdms) [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._shutdown_instance(context, instance, bdms) [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._try_deallocate_network(context, instance, requested_networks) [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] with excutils.save_and_reraise_exception(): [ 1525.312450] env[69227]: ERROR 
nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.force_reraise() [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise self.value [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] _deallocate_network_with_retries() [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return evt.wait() [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] result = hub.switch() [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.greenlet.switch() [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] result = func(*self.args, **self.kw) [ 1525.312450] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] result = f(*args, **kwargs) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._deallocate_network( [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self.network_api.deallocate_for_instance( [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1525.313575] env[69227]: 
ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] data = neutron.list_ports(**search_opts) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.list('ports', self.ports_path, retrieve_all, [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] for r in self._pagination(collection, path, **params): [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] res = self.get(path, params=params) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.retry_request("GET", action, body=body, [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] return self.do_request(method, action, body=body, [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] ret = obj(*args, **kwargs) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 
43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] self._handle_fault_response(status_code, replybody, resp) [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1525.313575] env[69227]: ERROR nova.compute.manager [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] [ 1525.318554] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda154e4-9ea0-4658-b99b-7f4a6a9a76f7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.347819] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180955MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1525.347970] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1525.348206] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1525.390278] env[69227]: DEBUG neutronclient.v2_0.client [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1525.391518] env[69227]: ERROR nova.compute.manager [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = getattr(controller, method)(*args, **kwargs) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._get(image_id) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] resp, body = self.http_client.get(url, headers=header) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.request(url, 'GET', **kwargs) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._handle_response(resp) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exc.from_response(resp, resp.content) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self.driver.spawn(context, instance, image_meta, [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._fetch_image_if_missing(context, vi) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image_fetch(context, vi, tmp_image_ds_loc) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] images.fetch_image( [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] metadata = IMAGE_API.get(context, image_ref) [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1525.391518] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return session.show(context, image_id, [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] _reraise_translated_image_exception(image_id) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise new_exc.with_traceback(exc_trace) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 
12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = getattr(controller, method)(*args, **kwargs) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._get(image_id) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] resp, body = self.http_client.get(url, headers=header) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.request(url, 'GET', **kwargs) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self._handle_response(resp) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exc.from_response(resp, resp.content) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._build_and_run_instance(context, instance, image, [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exception.RescheduledException( [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] nova.exception.RescheduledException: Build of instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 was re-scheduled: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] exception_handler_v20(status_code, error_body) [ 1525.392575] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise client_exc(message=error_message, [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Neutron server returns request_ids: ['req-3882ad33-c1d8-422b-aa7d-1cc4acde76f7'] [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 
12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _cleanup_allocated_networks [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._deallocate_network(context, instance, requested_networks) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self.network_api.deallocate_for_instance( [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] data = neutron.list_ports(**search_opts) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.list('ports', self.ports_path, retrieve_all, [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] for r in self._pagination(collection, path, **params): [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] res = self.get(path, params=params) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 
12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.retry_request("GET", action, body=body, [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.do_request(method, action, body=body, [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._handle_fault_response(status_code, replybody, resp) [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exception.Unauthorized() [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] nova.exception.Unauthorized: Not authorized. [ 1525.393702] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1525.682612] env[69227]: DEBUG nova.compute.manager [None req-45a8454b-3efd-4641-9fe6-385042711ded tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] [instance: 9b7a3c5c-96f1-461f-8bca-50c44315d737] Instance disappeared before build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1525.816590] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 1.141s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1525.818232] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: waited 353.761s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1525.818580] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] During sync_power_state the instance has a pending task (deleting). Skip. [ 1525.818688] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "43397ae2-14e8-495d-bdd9-54a14e6427e9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.<locals>._sync.<locals>.query_driver_power_state_and_sync" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1525.918954] env[69227]: INFO nova.scheduler.client.report [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Deleted allocations for instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 [ 1526.194829] env[69227]: DEBUG oslo_concurrency.lockutils [None req-45a8454b-3efd-4641-9fe6-385042711ded tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] Lock "9b7a3c5c-96f1-461f-8bca-50c44315d737" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 218.846s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1526.373381] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 859d632d-fb95-4ac6-9219-8768191979a5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.373656] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.373656] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.373771] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.373888] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.374026] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.374140] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.374256] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1526.427387] env[69227]: DEBUG oslo_concurrency.lockutils [None req-70aef843-b1bb-47e0-a958-037aa0f3a919 tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 612.295s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1526.428561] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 416.184s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1526.428789] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Acquiring lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1526.428997] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1526.429190] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1526.431379] env[69227]: INFO nova.compute.manager [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Terminating instance [ 1526.432999] env[69227]: DEBUG nova.compute.manager [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1526.433504] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1526.433639] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f40ce67f-58fa-416a-bb08-7263ac6ac5ff {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.442361] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad696a12-0d8f-421f-8355-a15f39210b61 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.469506] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 could not be found. [ 1526.469694] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1526.469864] env[69227]: INFO nova.compute.manager [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1526.470110] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1526.470330] env[69227]: DEBUG nova.compute.manager [-] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1526.470429] env[69227]: DEBUG nova.network.neutron [-] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1526.558963] env[69227]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1526.559240] env[69227]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [-] Dynamic interval looping call 'oslo_service.backend.eventlet.loopingcall.RetryDecorator.__call__.<locals>._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall exception_handler_v20(status_code, error_body) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise client_exc(message=error_message, [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Neutron server returns request_ids: ['req-304d75da-a73d-4e17-9efe-2ed9e1950d85'] [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall During handling of the above exception, another exception occurred: [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1526.560049] env[69227]: ERROR 
oslo.service.backend.eventlet.loopingcall result = func(*self.args, **self.kw) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = f(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._deallocate_network( [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self.network_api.deallocate_for_instance( [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall data = neutron.list_ports(**search_opts) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall for r in self._pagination(collection, path, **params): [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall res = self.get(path, params=params) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.retry_request("GET", action, body=body, [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.do_request(method, action, body=body, [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1526.560049] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1526.561438] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1526.561438] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1526.561438] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1526.561438] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1526.561438] env[69227]: ERROR nova.compute.manager [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1526.698199] env[69227]: DEBUG nova.compute.manager [None req-d9a7e8ef-5f4f-4726-94be-c544b8be83b8 tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] [instance: c540e175-7485-4384-9c45-f8a6b0c64b7b] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1526.877088] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1526.934143] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1527.065104] env[69227]: ERROR nova.compute.manager [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] exception_handler_v20(status_code, error_body) [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise client_exc(message=error_message, [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Neutron server returns request_ids: ['req-304d75da-a73d-4e17-9efe-2ed9e1950d85'] [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During handling of the above exception, another exception occurred: [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Traceback (most recent call last): [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._delete_instance(context, instance, bdms) [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._shutdown_instance(context, instance, bdms) [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._try_deallocate_network(context, instance, requested_networks) [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] with excutils.save_and_reraise_exception(): [ 1527.065104] env[69227]: ERROR 
nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self.force_reraise() [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise self.value [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] _deallocate_network_with_retries() [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return evt.wait() [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = hub.switch() [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.greenlet.switch() [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = func(*self.args, **self.kw) [ 1527.065104] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] result = f(*args, **kwargs) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._deallocate_network( [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self.network_api.deallocate_for_instance( [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1527.066229] env[69227]: 
ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] data = neutron.list_ports(**search_opts) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.list('ports', self.ports_path, retrieve_all, [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] for r in self._pagination(collection, path, **params): [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] res = self.get(path, params=params) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.retry_request("GET", action, body=body, [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] return self.do_request(method, action, body=body, [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] ret = obj(*args, **kwargs) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 
12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] self._handle_fault_response(status_code, replybody, resp) [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1527.066229] env[69227]: ERROR nova.compute.manager [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] [ 1527.201639] env[69227]: DEBUG nova.compute.manager [None req-d9a7e8ef-5f4f-4726-94be-c544b8be83b8 tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] [instance: c540e175-7485-4384-9c45-f8a6b0c64b7b] Instance disappeared before build. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1527.329366] env[69227]: INFO nova.compute.manager [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] [instance: 43397ae2-14e8-495d-bdd9-54a14e6427e9] Successfully reverted task state from None on failure for instance. [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server [None req-fd4f65a1-82ec-4564-a876-7e64c90f793f tempest-DeleteServersAdminTestJSON-1441896639 tempest-DeleteServersAdminTestJSON-1441896639-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-0ba0467b-8732-4003-ba39-a76bd4a5acef'] [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 1527.333820] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in 
wrapper [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1527.335711] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1527.337054] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1527.337054] env[69227]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1527.337054] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1527.337054] env[69227]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1527.337054] env[69227]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1527.337054] env[69227]: ERROR oslo_messaging.rpc.server [ 1527.380130] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1527.458188] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1527.572067] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 1.143s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1527.572754] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 355.515s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1527.572949] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] During sync_power_state the instance has a pending task (deleting). Skip. [ 1527.573157] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "12393e1f-9cb4-4d54-b485-ddc70c65ac47" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1527.714033] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d9a7e8ef-5f4f-4726-94be-c544b8be83b8 tempest-ServerShowV247Test-1957689290 tempest-ServerShowV247Test-1957689290-project-member] Lock "c540e175-7485-4384-9c45-f8a6b0c64b7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 220.177s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1527.883136] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1528.216605] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1528.385798] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1528.734544] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1528.888441] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.081984] env[69227]: INFO nova.compute.manager [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] [instance: 12393e1f-9cb4-4d54-b485-ddc70c65ac47] Successfully reverted task state from None on failure for instance. [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server [None req-f16c61f9-862b-472e-a379-60ed107736be tempest-ListImageFiltersTestJSON-1281810572 tempest-ListImageFiltersTestJSON-1281810572-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-304d75da-a73d-4e17-9efe-2ed9e1950d85'] [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 1529.086111] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in 
wrapper [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1529.087727] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1529.089188] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1529.089188] env[69227]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1529.089188] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1529.089188] env[69227]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1529.089188] env[69227]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1529.089188] env[69227]: ERROR oslo_messaging.rpc.server [ 1529.391850] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.896912] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 40a459c7-657d-40db-aa78-d16af085a3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1529.897200] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1529.897352] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1530.057825] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f535617-cdde-4772-8ebb-a6768f1b1810 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.065407] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fbe4de-2a57-4e6b-a4e3-85541f3beeec {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.095501] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f79ba7-8fe9-4c46-af81-5afff065831c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.102655] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb539ab-1716-4492-b99b-25de9e43b224 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.116068] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.618945] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1531.124683] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1531.124916] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.777s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1531.125210] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.667s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1531.126798] env[69227]: INFO nova.compute.claims [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1532.130108] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.130350] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.313952] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e5efe3-c518-4498-8d0b-6729e7b4f953 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.321878] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20d20f4-9907-4513-8ca6-48e318d67f72 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.352593] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781831d9-d915-4ad0-9349-588ee2c20e7a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.361026] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a6e1932-fde3-4749-95b7-94eeefc5032e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.373400] env[69227]: DEBUG nova.compute.provider_tree [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1532.635802] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1532.636479] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1532.636479] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1532.876626] env[69227]: DEBUG nova.scheduler.client.report [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1532.945959] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1533.140051] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140318] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140318] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140431] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140519] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140640] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140759] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.140907] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.141083] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1533.141245] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1533.141479] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.142133] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.142133] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1533.142133] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1533.381736] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.256s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1533.382282] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1533.384908] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.650s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1533.386365] env[69227]: INFO nova.compute.claims [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1533.890114] env[69227]: DEBUG nova.compute.claims [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1533.890114] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1533.890114] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.505s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1533.890837] env[69227]: DEBUG nova.compute.utils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Instance 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12 could not be found. {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1533.891957] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.002s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1533.894616] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Instance disappeared during build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 1533.894789] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1533.894998] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "refresh_cache-0d6d7f24-b7e5-419d-9fef-c9e0d34eac12" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.895199] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "refresh_cache-0d6d7f24-b7e5-419d-9fef-c9e0d34eac12" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1533.895371] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1534.401056] env[69227]: DEBUG nova.compute.utils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Can not refresh info_cache because instance was not found {{(pid=69227) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1534.426540] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1534.494389] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.563945] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b542030-ec41-44c8-8b1c-128f04aa5868 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.572573] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7936d8e-cd2f-4c55-9fb0-3cc612007b02 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.602185] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b89402c-139c-49fa-94a0-913cbb2a36d7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.609236] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ac7532-4b08-44e3-84a4-717f837e67da {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.622034] env[69227]: DEBUG nova.compute.provider_tree [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1534.997309] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "refresh_cache-0d6d7f24-b7e5-419d-9fef-c9e0d34eac12" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1534.997538] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1534.997726] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1534.997973] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1535.013513] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1535.124785] env[69227]: DEBUG nova.scheduler.client.report [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1535.515814] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1535.629352] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.737s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1535.630151] env[69227]: DEBUG nova.compute.utils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Conflict updating instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1535.631706] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance disappeared during build. 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2513}} [ 1535.631881] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1535.632109] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "refresh_cache-9733c4da-df49-4f87-a8af-5e12c1db7ed6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.632257] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "refresh_cache-9733c4da-df49-4f87-a8af-5e12c1db7ed6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1535.632416] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1536.018573] env[69227]: INFO nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 0d6d7f24-b7e5-419d-9fef-c9e0d34eac12] Took 1.02 seconds to deallocate network for instance. [ 1536.149753] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1536.210351] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1536.714049] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "refresh_cache-9733c4da-df49-4f87-a8af-5e12c1db7ed6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1536.714049] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1536.714049] env[69227]: DEBUG nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1536.714049] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1536.728114] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1537.035897] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "0d6d7f24-b7e5-419d-9fef-c9e0d34eac12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.266s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1537.231808] env[69227]: DEBUG nova.network.neutron [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.538793] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1537.734264] env[69227]: INFO nova.compute.manager [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Took 1.02 seconds to deallocate network for instance. 
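(Annotation on the two tracebacks above, both of which end in nova/network/neutron.py: the client wrapper re-invokes the wrapped python-neutronclient call at line 196 and, when Neutron answers with the 401 shown ("The request you have made requires authentication."), line 212 raises nova.exception.NeutronAdminCredentialConfigurationInvalid. The sketch below is an illustrative simplification of that translation pattern, not the actual Nova source; the names wrap_neutron_call and func are invented for the sketch, and the real wrapper also handles user-context tokens and other fault codes, which are omitted here.)

# Minimal sketch of the 401-translation pattern seen at
# nova/network/neutron.py:196 and :212 in the tracebacks above.
# Illustrative only -- not the actual Nova implementation.

from neutronclient.common import exceptions as neutron_client_exc

from nova import exception


def wrap_neutron_call(func):
    """Proxy a neutronclient method, translating 401 responses."""

    def wrapper(*args, **kwargs):
        try:
            # e.g. client.list_ports(**search_opts), as in the traceback
            ret = func(*args, **kwargs)
        except neutron_client_exc.Unauthorized:
            # Neutron rejected the token. For calls made with the service
            # credentials from the [neutron] section of nova.conf this points
            # at operator configuration, which is what the log reports.
            raise exception.NeutronAdminCredentialConfigurationInvalid()
        return ret

    return wrapper

(In this log the translated exception surfaces twice: first when _try_deallocate_network for instance 12393e1f-9cb4-4d54-b485-ddc70c65ac47 fails at 1527.335711, then again at 1529.086111 when the RPC dispatcher re-logs it after the task state is reverted. The repeated 401s suggest checking the Neutron credentials Nova is configured with rather than anything on the instances themselves.)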
[ 1538.060591] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1538.060987] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1538.063350] env[69227]: INFO nova.compute.claims [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1538.762141] env[69227]: INFO nova.scheduler.client.report [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Deleted allocations for instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6 [ 1538.762995] env[69227]: DEBUG oslo_concurrency.lockutils [None req-82dfe7df-8d30-4533-a7b0-be697115e9ed tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.025s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1538.763918] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 5.818s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1538.764154] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1538.764359] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1538.764991] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock 
"9733c4da-df49-4f87-a8af-5e12c1db7ed6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1538.766378] env[69227]: INFO nova.compute.manager [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Terminating instance [ 1538.767926] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquiring lock "refresh_cache-9733c4da-df49-4f87-a8af-5e12c1db7ed6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.768107] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Acquired lock "refresh_cache-9733c4da-df49-4f87-a8af-5e12c1db7ed6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1538.768284] env[69227]: DEBUG nova.network.neutron [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1539.228291] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7673807-e908-454d-b160-a54c3c2643f8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.235877] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aff3c8f-6775-4bc8-bcce-e4ecee2236a8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.265753] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef72176c-ee2c-4ea1-878a-8322ef42f78b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.268211] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1539.276894] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0c2bbe-ce97-48a5-a8e0-a06dac6507cc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.289749] env[69227]: DEBUG nova.compute.provider_tree [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1539.291539] env[69227]: DEBUG nova.network.neutron [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1539.356045] env[69227]: DEBUG nova.network.neutron [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.789149] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1539.794229] env[69227]: DEBUG nova.scheduler.client.report [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1539.858930] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Releasing lock "refresh_cache-9733c4da-df49-4f87-a8af-5e12c1db7ed6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1539.859389] env[69227]: DEBUG nova.compute.manager [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1539.859589] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1539.859873] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-abc34973-713f-4e3a-bf82-914172d866f3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.868512] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5d1808-4388-4117-a25c-1931f057cf09 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.896475] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9733c4da-df49-4f87-a8af-5e12c1db7ed6 could not be found. [ 1539.896690] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1539.896846] env[69227]: INFO nova.compute.manager [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1539.897144] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1539.897370] env[69227]: DEBUG nova.compute.manager [-] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1539.897468] env[69227]: DEBUG nova.network.neutron [-] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1539.910574] env[69227]: DEBUG nova.network.neutron [-] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1540.299240] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.238s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1540.299765] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1540.302824] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.514s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1540.305168] env[69227]: INFO nova.compute.claims [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1540.412879] env[69227]: DEBUG nova.network.neutron [-] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1540.808802] env[69227]: DEBUG nova.compute.utils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1540.811924] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1540.812103] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1540.848329] env[69227]: DEBUG nova.policy [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d24550af0ac49b9bb2861b85f883963', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef9569e6199c499fb61cdb3b348cc489', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1540.915326] env[69227]: INFO nova.compute.manager [-] [instance: 9733c4da-df49-4f87-a8af-5e12c1db7ed6] Took 1.02 seconds to deallocate network for instance. [ 1541.153539] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Successfully created port: ef2605a4-c0a2-4503-967e-bfad9ccc2930 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1541.313728] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1541.509528] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cbdc433-e2ee-4026-aefa-39c776059bd7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.516925] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06bd2325-d35a-45f0-945b-2e7c0a084c97 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.547239] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd403292-88e4-4f2d-b7dc-0ab79ded32f7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.554227] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca274f30-9c9e-4533-bce3-f1ae57ffcfdd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.567095] env[69227]: DEBUG nova.compute.provider_tree [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.939682] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d6f0055a-6fa3-4b78-92a2-e8a47b98303b tempest-MultipleCreateTestJSON-1621903382 tempest-MultipleCreateTestJSON-1621903382-project-member] Lock "9733c4da-df49-4f87-a8af-5e12c1db7ed6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.176s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1542.070122] env[69227]: DEBUG nova.scheduler.client.report [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1542.329360] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1542.359022] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1542.359260] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1542.359420] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1542.359603] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1542.359744] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1542.359889] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1542.360128] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1542.360347] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1542.360540] env[69227]: DEBUG 
nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1542.360707] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1542.360879] env[69227]: DEBUG nova.virt.hardware [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1542.361756] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140249b2-5062-4e99-814a-bc4933ffcc0a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.369811] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b2454b-379d-4ef4-a078-a53895ebaee5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.575058] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.272s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1542.575700] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1542.681329] env[69227]: DEBUG nova.compute.manager [req-bdb0e61b-11e5-4819-9136-9b6ca1e0de93 req-e5156f75-d040-4bca-945f-18c93e18657c service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Received event network-vif-plugged-ef2605a4-c0a2-4503-967e-bfad9ccc2930 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1542.681549] env[69227]: DEBUG oslo_concurrency.lockutils [req-bdb0e61b-11e5-4819-9136-9b6ca1e0de93 req-e5156f75-d040-4bca-945f-18c93e18657c service nova] Acquiring lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1542.682172] env[69227]: DEBUG oslo_concurrency.lockutils [req-bdb0e61b-11e5-4819-9136-9b6ca1e0de93 req-e5156f75-d040-4bca-945f-18c93e18657c service nova] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1542.682421] env[69227]: DEBUG oslo_concurrency.lockutils [req-bdb0e61b-11e5-4819-9136-9b6ca1e0de93 req-e5156f75-d040-4bca-945f-18c93e18657c service nova] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1542.682523] env[69227]: DEBUG nova.compute.manager [req-bdb0e61b-11e5-4819-9136-9b6ca1e0de93 req-e5156f75-d040-4bca-945f-18c93e18657c service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] No waiting events found dispatching network-vif-plugged-ef2605a4-c0a2-4503-967e-bfad9ccc2930 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1542.682684] env[69227]: WARNING nova.compute.manager [req-bdb0e61b-11e5-4819-9136-9b6ca1e0de93 req-e5156f75-d040-4bca-945f-18c93e18657c service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Received unexpected event network-vif-plugged-ef2605a4-c0a2-4503-967e-bfad9ccc2930 for instance with vm_state building and task_state spawning. [ 1542.730783] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Successfully updated port: ef2605a4-c0a2-4503-967e-bfad9ccc2930 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1543.083261] env[69227]: DEBUG nova.compute.utils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.084793] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1543.084972] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1543.148309] env[69227]: DEBUG nova.policy [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82676d52930a46aca37a4f718e25da7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b10a196d0654c479f190fc16463d71e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1543.234065] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "refresh_cache-8ccb6955-9796-4f7f-bc22-a3e9563d3f43" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1543.234065] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquired lock "refresh_cache-8ccb6955-9796-4f7f-bc22-a3e9563d3f43" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1543.234065] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1543.421466] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Successfully created port: dca63fda-e07f-4712-a3ea-112c019729c5 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1543.593994] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1543.800591] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1543.968306] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Updating instance_info_cache with network_info: [{"id": "ef2605a4-c0a2-4503-967e-bfad9ccc2930", "address": "fa:16:3e:35:91:14", "network": {"id": "94293070-89c1-4a2a-ab44-ad2dd21aa88b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1599342421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef9569e6199c499fb61cdb3b348cc489", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2605a4-c0", "ovs_interfaceid": "ef2605a4-c0a2-4503-967e-bfad9ccc2930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1544.472384] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Releasing lock "refresh_cache-8ccb6955-9796-4f7f-bc22-a3e9563d3f43" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1544.472778] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance network_info: |[{"id": "ef2605a4-c0a2-4503-967e-bfad9ccc2930", "address": "fa:16:3e:35:91:14", "network": {"id": "94293070-89c1-4a2a-ab44-ad2dd21aa88b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1599342421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef9569e6199c499fb61cdb3b348cc489", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2605a4-c0", "ovs_interfaceid": "ef2605a4-c0a2-4503-967e-bfad9ccc2930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1544.473227] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:91:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ef2605a4-c0a2-4503-967e-bfad9ccc2930', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1544.480848] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Creating folder: Project (ef9569e6199c499fb61cdb3b348cc489). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1544.481142] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-74c3289d-b544-4bc3-a71b-9bb4b2973ca3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.491144] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Created folder: Project (ef9569e6199c499fb61cdb3b348cc489) in parent group-v694623. [ 1544.491330] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Creating folder: Instances. Parent ref: group-v694703. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1544.491555] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fe118b4-64af-4eb3-b889-f14bb6427077 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.499594] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Created folder: Instances in parent group-v694703. [ 1544.499807] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1544.499981] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1544.500179] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bb52c040-b628-4044-8ae4-d1c7d62aa7d3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.518615] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1544.518615] env[69227]: value = "task-3475110" [ 1544.518615] env[69227]: _type = "Task" [ 1544.518615] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.525678] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475110, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.608057] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1544.632699] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.634113] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.634382] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.634435] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.634603] env[69227]: DEBUG nova.virt.hardware [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.635475] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffb3c5b-602e-42f6-8548-82d9c5162f3e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.643246] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dadbe9-f6d4-4da6-9a07-f27bfd4c7718 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.713895] env[69227]: DEBUG nova.compute.manager [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Received event network-changed-ef2605a4-c0a2-4503-967e-bfad9ccc2930 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1544.713895] env[69227]: DEBUG nova.compute.manager [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] [instance: 
8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Refreshing instance network info cache due to event network-changed-ef2605a4-c0a2-4503-967e-bfad9ccc2930. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1544.713895] env[69227]: DEBUG oslo_concurrency.lockutils [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] Acquiring lock "refresh_cache-8ccb6955-9796-4f7f-bc22-a3e9563d3f43" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1544.714933] env[69227]: DEBUG oslo_concurrency.lockutils [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] Acquired lock "refresh_cache-8ccb6955-9796-4f7f-bc22-a3e9563d3f43" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1544.715294] env[69227]: DEBUG nova.network.neutron [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Refreshing network info cache for port ef2605a4-c0a2-4503-967e-bfad9ccc2930 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1545.030093] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475110, 'name': CreateVM_Task, 'duration_secs': 0.294566} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.030290] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1545.030957] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.031135] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1545.031441] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1545.031689] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59115245-8f53-4359-94a5-a5c1cf27b24f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.035932] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Waiting for the task: (returnval){ [ 1545.035932] env[69227]: value = 
"session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52080fed-2c06-06cc-65d7-c4c12d7001f3" [ 1545.035932] env[69227]: _type = "Task" [ 1545.035932] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.044435] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52080fed-2c06-06cc-65d7-c4c12d7001f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.112386] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Successfully updated port: dca63fda-e07f-4712-a3ea-112c019729c5 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.424935] env[69227]: DEBUG nova.network.neutron [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Updated VIF entry in instance network info cache for port ef2605a4-c0a2-4503-967e-bfad9ccc2930. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1545.425330] env[69227]: DEBUG nova.network.neutron [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Updating instance_info_cache with network_info: [{"id": "ef2605a4-c0a2-4503-967e-bfad9ccc2930", "address": "fa:16:3e:35:91:14", "network": {"id": "94293070-89c1-4a2a-ab44-ad2dd21aa88b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1599342421-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef9569e6199c499fb61cdb3b348cc489", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapef2605a4-c0", "ovs_interfaceid": "ef2605a4-c0a2-4503-967e-bfad9ccc2930", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1545.546822] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1545.547156] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc 
tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1545.547322] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.615602] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "refresh_cache-1b975f6d-7e12-44cd-99c4-c480edc286bd" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.615804] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquired lock "refresh_cache-1b975f6d-7e12-44cd-99c4-c480edc286bd" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1545.615966] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1545.928145] env[69227]: DEBUG oslo_concurrency.lockutils [req-7ad10622-115e-42c7-991d-8482422ace2a req-6f4825d6-46c5-4acf-85f8-831da48f51f1 service nova] Releasing lock "refresh_cache-8ccb6955-9796-4f7f-bc22-a3e9563d3f43" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1546.154418] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1546.516121] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Updating instance_info_cache with network_info: [{"id": "dca63fda-e07f-4712-a3ea-112c019729c5", "address": "fa:16:3e:47:ed:07", "network": {"id": "1b4de3eb-04ea-4b73-948f-7720ff29f1ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1259715645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b10a196d0654c479f190fc16463d71e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca63fda-e0", "ovs_interfaceid": "dca63fda-e07f-4712-a3ea-112c019729c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.743320] env[69227]: DEBUG nova.compute.manager [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Received event network-vif-plugged-dca63fda-e07f-4712-a3ea-112c019729c5 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1546.743663] env[69227]: DEBUG oslo_concurrency.lockutils [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] Acquiring lock "1b975f6d-7e12-44cd-99c4-c480edc286bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1546.743782] env[69227]: DEBUG oslo_concurrency.lockutils [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1546.743895] env[69227]: DEBUG oslo_concurrency.lockutils [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1546.744064] env[69227]: DEBUG nova.compute.manager [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] No waiting events found dispatching network-vif-plugged-dca63fda-e07f-4712-a3ea-112c019729c5 {{(pid=69227) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1546.744239] env[69227]: WARNING nova.compute.manager [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Received unexpected event network-vif-plugged-dca63fda-e07f-4712-a3ea-112c019729c5 for instance with vm_state building and task_state spawning. [ 1546.744397] env[69227]: DEBUG nova.compute.manager [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Received event network-changed-dca63fda-e07f-4712-a3ea-112c019729c5 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1546.744559] env[69227]: DEBUG nova.compute.manager [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Refreshing instance network info cache due to event network-changed-dca63fda-e07f-4712-a3ea-112c019729c5. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1546.744724] env[69227]: DEBUG oslo_concurrency.lockutils [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] Acquiring lock "refresh_cache-1b975f6d-7e12-44cd-99c4-c480edc286bd" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.018655] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Releasing lock "refresh_cache-1b975f6d-7e12-44cd-99c4-c480edc286bd" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1547.018655] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance network_info: |[{"id": "dca63fda-e07f-4712-a3ea-112c019729c5", "address": "fa:16:3e:47:ed:07", "network": {"id": "1b4de3eb-04ea-4b73-948f-7720ff29f1ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1259715645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b10a196d0654c479f190fc16463d71e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca63fda-e0", "ovs_interfaceid": "dca63fda-e07f-4712-a3ea-112c019729c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1547.018879] env[69227]: DEBUG oslo_concurrency.lockutils [req-297601a1-7755-451e-97f9-5f0b4dccdef0 
req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] Acquired lock "refresh_cache-1b975f6d-7e12-44cd-99c4-c480edc286bd" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.019064] env[69227]: DEBUG nova.network.neutron [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Refreshing network info cache for port dca63fda-e07f-4712-a3ea-112c019729c5 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1547.022533] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:ed:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dca63fda-e07f-4712-a3ea-112c019729c5', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.028178] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Creating folder: Project (1b10a196d0654c479f190fc16463d71e). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1547.028821] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45535b7f-6d91-4e52-812b-08d80cb6afe5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.041442] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Created folder: Project (1b10a196d0654c479f190fc16463d71e) in parent group-v694623. [ 1547.041628] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Creating folder: Instances. Parent ref: group-v694706. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1547.041856] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-143afd7b-4ef9-4d64-8e11-4d40eaa30026 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.053498] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Created folder: Instances in parent group-v694706. [ 1547.053717] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1547.053967] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1547.054089] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-005745b2-3701-4095-961f-fb8bc25a837a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.079252] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.079252] env[69227]: value = "task-3475113" [ 1547.079252] env[69227]: _type = "Task" [ 1547.079252] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.089899] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475113, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.596769] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475113, 'name': CreateVM_Task, 'duration_secs': 0.313073} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.597043] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1547.598914] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1547.599568] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1547.600868] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1547.601189] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1999c2ad-f002-4027-ac3b-dd2f2af09435 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.607102] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Waiting for the task: (returnval){ [ 1547.607102] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52aff8ec-a330-13f0-5f87-10422954caa3" [ 1547.607102] env[69227]: _type = "Task" [ 1547.607102] env[69227]: } to 
complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.615241] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52aff8ec-a330-13f0-5f87-10422954caa3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.822524] env[69227]: DEBUG nova.network.neutron [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Updated VIF entry in instance network info cache for port dca63fda-e07f-4712-a3ea-112c019729c5. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1547.822899] env[69227]: DEBUG nova.network.neutron [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Updating instance_info_cache with network_info: [{"id": "dca63fda-e07f-4712-a3ea-112c019729c5", "address": "fa:16:3e:47:ed:07", "network": {"id": "1b4de3eb-04ea-4b73-948f-7720ff29f1ab", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1259715645-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1b10a196d0654c479f190fc16463d71e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "022c7dd5-6c13-49c7-84f4-8b6c1fda4fb7", "external-id": "nsx-vlan-transportzone-694", "segmentation_id": 694, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdca63fda-e0", "ovs_interfaceid": "dca63fda-e07f-4712-a3ea-112c019729c5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.117543] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1548.117857] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.118124] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.326313] env[69227]: DEBUG oslo_concurrency.lockutils [req-297601a1-7755-451e-97f9-5f0b4dccdef0 req-772fa87d-8e70-4f24-af4d-918deba7dc85 service nova] Releasing lock "refresh_cache-1b975f6d-7e12-44cd-99c4-c480edc286bd" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1548.624710] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1548.624901] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1551.557542] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1569.860812] env[69227]: WARNING oslo_vmware.rw_handles [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1569.860812] env[69227]: ERROR oslo_vmware.rw_handles [ 1569.861506] env[69227]: DEBUG nova.virt.vmwareapi.images [None 
req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1569.863327] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1569.863604] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Copying Virtual Disk [datastore2] vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/4e635079-2331-499f-9cf0-31504a606345/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1569.863914] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d243a60a-7d5c-4183-a34b-6ee9fd7824fc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.871577] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Waiting for the task: (returnval){ [ 1569.871577] env[69227]: value = "task-3475114" [ 1569.871577] env[69227]: _type = "Task" [ 1569.871577] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.879370] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Task: {'id': task-3475114, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.382167] env[69227]: DEBUG oslo_vmware.exceptions [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1570.382447] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1570.383019] env[69227]: ERROR nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1570.383019] env[69227]: Faults: ['InvalidArgument'] [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Traceback (most recent call last): [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] yield resources [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self.driver.spawn(context, instance, image_meta, [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self._fetch_image_if_missing(context, vi) [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] image_cache(vi, tmp_image_ds_loc) [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] vm_util.copy_virtual_disk( [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] session._wait_for_task(vmdk_copy_task) [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] return self.wait_for_task(task_ref) [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] return evt.wait() [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] result = hub.switch() [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] return self.greenlet.switch() [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self.f(*self.args, **self.kw) [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] raise exceptions.translate_fault(task_info.error) [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Faults: ['InvalidArgument'] [ 1570.383019] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] [ 1570.384139] env[69227]: INFO nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Terminating instance [ 1570.384867] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1570.385088] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.385710] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 
859d632d-fb95-4ac6-9219-8768191979a5] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1570.385936] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1570.386182] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a37f5ae-94ed-472c-8c2b-134c37d20f65 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.388740] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dca0613f-eeb7-4de6-be36-4f93993b57fe {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.396127] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1570.396351] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ff0f2c2-f46a-426d-aa8c-22ad62bcb635 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.398509] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.398991] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1570.399655] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5e14e93-b69d-498f-bcf7-57a9bb206463 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.404203] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 1570.404203] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52a5dcb3-b9e4-86eb-47a5-2dbcefc38e77" [ 1570.404203] env[69227]: _type = "Task" [ 1570.404203] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.411183] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52a5dcb3-b9e4-86eb-47a5-2dbcefc38e77, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.466536] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1570.466725] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1570.466882] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Deleting the datastore file [datastore2] 859d632d-fb95-4ac6-9219-8768191979a5 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1570.467171] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9aaa68a-07b0-4da5-bd0c-9c10582e8c09 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.473350] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Waiting for the task: (returnval){ [ 1570.473350] env[69227]: value = "task-3475116" [ 1570.473350] env[69227]: _type = "Task" [ 1570.473350] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.480912] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Task: {'id': task-3475116, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.914542] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1570.914854] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Creating directory with path [datastore2] vmware_temp/44d88eee-8920-4fdf-9126-d45658354788/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.915032] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e2b1ac3-64e6-4b2b-bd6d-f454844b94de {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.925810] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Created directory with path [datastore2] vmware_temp/44d88eee-8920-4fdf-9126-d45658354788/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.926011] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Fetch image to [datastore2] vmware_temp/44d88eee-8920-4fdf-9126-d45658354788/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1570.926196] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/44d88eee-8920-4fdf-9126-d45658354788/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1570.926917] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8f7d9b-bed9-4ddd-8905-43d0029df514 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.933094] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25062b7-3015-45ee-8f4c-46912d46e750 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.943011] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f90f4b45-5199-46da-9035-a25c21a41807 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.973835] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566143fe-6bb3-4034-8c6e-ecf3fb7c7832 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.984179] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8b0f932c-a70f-4071-9f57-aa9ebe83e096 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.985801] env[69227]: DEBUG oslo_vmware.api [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Task: {'id': task-3475116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074001} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.986045] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1570.986233] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1570.986401] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1570.986566] env[69227]: INFO nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Took 0.60 seconds to destroy the instance on the hypervisor. 
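Note on the failure above: the CopyVirtualDisk_Task is polled until it leaves the running state, and when the task ends in error the fault list ("Faults: ['InvalidArgument']") is turned into an exception that aborts the spawn. The sketch below shows that general poll-and-raise pattern only; TaskInfo, VimFault and poll_task are illustrative stand-ins, not oslo.vmware or nova APIs.

```python
# Minimal sketch (assumed names) of a vCenter-style task poll loop that
# surfaces a fault such as "A specified parameter was not correct: fileType"
# as an exception, the way the CopyVirtualDisk_Task failure is reported above.
import time
from dataclasses import dataclass, field


@dataclass
class TaskInfo:
    state: str = "running"          # "running" | "success" | "error"
    progress: int = 0
    error_message: str = ""
    faults: list = field(default_factory=list)


class VimFault(Exception):
    def __init__(self, message, faults):
        super().__init__(message)
        self.faults = faults


def poll_task(read_task_info, interval=0.5, timeout=60.0):
    """Poll a task until it finishes; raise VimFault if it ends in error."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = read_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise VimFault(info.error_message, info.faults)
        time.sleep(interval)
    raise TimeoutError("task did not complete in time")


if __name__ == "__main__":
    # Simulate the failing copy task from the log: one "running" sample,
    # then an error carrying the InvalidArgument fault.
    samples = iter([
        TaskInfo(state="running", progress=0),
        TaskInfo(state="error",
                 error_message="A specified parameter was not correct: fileType",
                 faults=["InvalidArgument"]),
    ])
    try:
        poll_task(lambda: next(samples), interval=0.01)
    except VimFault as exc:
        print(f"task failed: {exc} (faults={exc.faults})")
```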
[ 1570.988652] env[69227]: DEBUG nova.compute.claims [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1570.988815] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1570.989063] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1571.006617] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1571.128783] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1571.129689] env[69227]: ERROR nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = getattr(controller, method)(*args, **kwargs) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._get(image_id) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] resp, body = self.http_client.get(url, headers=header) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.request(url, 'GET', **kwargs) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._handle_response(resp) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exc.from_response(resp, resp.content) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] yield resources [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self.driver.spawn(context, instance, image_meta, [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._fetch_image_if_missing(context, vi) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image_fetch(context, vi, tmp_image_ds_loc) [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] images.fetch_image( [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1571.129689] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] metadata = IMAGE_API.get(context, image_ref) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return session.show(context, image_id, [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] _reraise_translated_image_exception(image_id) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File 
"/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise new_exc.with_traceback(exc_trace) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = getattr(controller, method)(*args, **kwargs) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._get(image_id) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] resp, body = self.http_client.get(url, headers=header) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.request(url, 'GET', **kwargs) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._handle_response(resp) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exc.from_response(resp, resp.content) [ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1571.130839] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1571.130839] env[69227]: INFO nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Terminating instance [ 1571.131647] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1571.131889] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.132174] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91773117-2283-468d-a8f4-266a87e0dde2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.134863] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1571.135156] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1571.136089] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28051a7e-d174-41ed-8d38-d9117c6be9fd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.143951] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1571.144196] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94374515-77ec-4318-867b-991f612b8f4e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.146532] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.146703] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 
tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1571.147664] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff305987-63cc-449f-aa2c-e785d7ab0389 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.153742] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Waiting for the task: (returnval){ [ 1571.153742] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]523e98a7-2cb8-4fbf-baee-93aad879aeba" [ 1571.153742] env[69227]: _type = "Task" [ 1571.153742] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.160846] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]523e98a7-2cb8-4fbf-baee-93aad879aeba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.217249] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1571.217479] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1571.217665] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Deleting the datastore file [datastore2] ecd508a6-185d-42ce-8bb7-f0e6173d6556 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1571.217924] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0b933fb-182d-4adc-8615-6212adf03aca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.225034] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for the task: (returnval){ [ 1571.225034] env[69227]: value = "task-3475118" [ 1571.225034] env[69227]: _type = "Task" [ 1571.225034] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.234922] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': task-3475118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.645837] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270b896c-a620-46e1-a10d-bf1831570b85 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.653321] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82d1159d-ee64-4e28-a48b-2ff8be2891d3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.687673] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1571.687972] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Creating directory with path [datastore2] vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.688392] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c4bbb84-b343-4020-9ddc-072f6b1a23fa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.690420] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dda3386c-c02c-4ca9-973a-cdbf8ef90bad {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.697464] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da189c8a-5467-47da-a705-157fcee4d50e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.702015] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Created directory with path [datastore2] vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.702296] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Fetch image to [datastore2] vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 
1571.702571] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1571.710503] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c344598-c84f-4cdb-97d6-0bf83e9756bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.712995] env[69227]: DEBUG nova.compute.provider_tree [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.718092] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9908da4f-eaf7-4111-bd2c-6096e6c983c9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.730096] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6443df75-d08d-402b-9a00-683a7436d058 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.738665] env[69227]: DEBUG oslo_vmware.api [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Task: {'id': task-3475118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076588} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.765025] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1571.765025] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1571.765025] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1571.765025] env[69227]: INFO nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1571.765025] env[69227]: DEBUG nova.compute.claims [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1571.765025] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1571.765541] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49328f6-9059-46bc-a475-88a255751ae8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.771062] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-6b73e6d6-0fdf-40f1-a9f0-b251ccba6645 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.792549] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1571.840126] env[69227]: DEBUG oslo_vmware.rw_handles [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1571.919315] env[69227]: DEBUG oslo_vmware.rw_handles [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1571.919694] env[69227]: DEBUG oslo_vmware.rw_handles [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1572.216814] env[69227]: DEBUG nova.scheduler.client.report [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1572.721478] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.732s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1572.721997] env[69227]: ERROR nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.721997] env[69227]: Faults: ['InvalidArgument'] [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Traceback (most recent call last): [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self.driver.spawn(context, instance, image_meta, [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self._fetch_image_if_missing(context, vi) [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] image_cache(vi, tmp_image_ds_loc) [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] vm_util.copy_virtual_disk( [ 1572.721997] env[69227]: ERROR nova.compute.manager 
[instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] session._wait_for_task(vmdk_copy_task) [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] return self.wait_for_task(task_ref) [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] return evt.wait() [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] result = hub.switch() [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] return self.greenlet.switch() [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] self.f(*self.args, **self.kw) [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] raise exceptions.translate_fault(task_info.error) [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Faults: ['InvalidArgument'] [ 1572.721997] env[69227]: ERROR nova.compute.manager [instance: 859d632d-fb95-4ac6-9219-8768191979a5] [ 1572.722931] env[69227]: DEBUG nova.compute.utils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1572.723870] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.959s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1572.727662] env[69227]: DEBUG nova.compute.manager [None 
req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Build of instance 859d632d-fb95-4ac6-9219-8768191979a5 was re-scheduled: A specified parameter was not correct: fileType [ 1572.727662] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1572.728078] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1572.728265] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1572.728434] env[69227]: DEBUG nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1572.728591] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1573.401977] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5194b2-63e1-4596-a96b-1ddd0a8ead2c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.410161] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f131493-d53a-41bf-bfba-a87ea0e443c6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.440742] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49530229-7476-4048-8039-af4010462216 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.448291] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2175909-e3b8-47f8-812b-5c0c1fb0bca3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.461916] env[69227]: DEBUG nova.compute.provider_tree [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.599939] env[69227]: DEBUG nova.network.neutron [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 
tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1573.965543] env[69227]: DEBUG nova.scheduler.client.report [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1574.102898] env[69227]: INFO nova.compute.manager [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Took 1.37 seconds to deallocate network for instance. [ 1574.470610] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.746s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1574.470965] env[69227]: ERROR nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = getattr(controller, method)(*args, **kwargs) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._get(image_id) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] resp, body = self.http_client.get(url, headers=header) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.request(url, 'GET', **kwargs) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._handle_response(resp) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exc.from_response(resp, resp.content) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self.driver.spawn(context, instance, image_meta, [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._fetch_image_if_missing(context, vi) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image_fetch(context, vi, tmp_image_ds_loc) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] images.fetch_image( [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] metadata = IMAGE_API.get(context, image_ref) [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1574.470965] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return session.show(context, image_id, [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] _reraise_translated_image_exception(image_id) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise new_exc.with_traceback(exc_trace) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: 
ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = getattr(controller, method)(*args, **kwargs) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._get(image_id) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] resp, body = self.http_client.get(url, headers=header) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.request(url, 'GET', **kwargs) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._handle_response(resp) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exc.from_response(resp, resp.content) [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. [ 1574.472085] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.472085] env[69227]: DEBUG nova.compute.utils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
{{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1574.473459] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Build of instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 was re-scheduled: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1574.473972] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1574.474171] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1574.474328] env[69227]: DEBUG nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1574.474492] env[69227]: DEBUG nova.network.neutron [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1574.585779] env[69227]: DEBUG neutronclient.v2_0.client [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1574.586755] env[69227]: ERROR nova.compute.manager [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = getattr(controller, method)(*args, **kwargs) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._get(image_id) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] resp, body = self.http_client.get(url, headers=header) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.request(url, 'GET', **kwargs) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._handle_response(resp) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exc.from_response(resp, resp.content) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self.driver.spawn(context, instance, image_meta, [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._fetch_image_if_missing(context, vi) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image_fetch(context, vi, tmp_image_ds_loc) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] images.fetch_image( [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] metadata = IMAGE_API.get(context, image_ref) [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 1206, in get [ 1574.586755] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return session.show(context, image_id, [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] _reraise_translated_image_exception(image_id) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 1032, in _reraise_translated_image_exception [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise new_exc.with_traceback(exc_trace) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: 
ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = getattr(controller, method)(*args, **kwargs) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._get(image_id) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 652, in inner [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] resp, body = self.http_client.get(url, headers=header) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 599, in get [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.request(url, 'GET', **kwargs) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 376, in request [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self._handle_response(resp) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 116, in _handle_response [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exc.from_response(resp, resp.content) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] nova.exception.ImageNotAuthorized: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. 
[ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2456, in _do_build_and_run_instance [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._build_and_run_instance(context, instance, image, [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2748, in _build_and_run_instance [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exception.RescheduledException( [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] nova.exception.RescheduledException: Build of instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 was re-scheduled: Not authorized for image 78c61090-3613-43e2-b8eb-045dfd47af0c. [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] exception_handler_v20(status_code, error_body) [ 1574.587743] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise client_exc(message=error_message, [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Neutron server returns request_ids: ['req-4c434486-046b-473a-97c1-f96d8275c595'] [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: 
ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _cleanup_allocated_networks [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._deallocate_network(context, instance, requested_networks) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self.network_api.deallocate_for_instance( [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] data = neutron.list_ports(**search_opts) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.list('ports', self.ports_path, retrieve_all, [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] for r in self._pagination(collection, path, **params): [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] res = self.get(path, params=params) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: 
ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.retry_request("GET", action, body=body, [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.do_request(method, action, body=body, [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._handle_fault_response(status_code, replybody, resp) [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exception.Unauthorized() [ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] nova.exception.Unauthorized: Not authorized. 
[ 1574.588708] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1575.113230] env[69227]: INFO nova.scheduler.client.report [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Deleted allocations for instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 [ 1575.139227] env[69227]: INFO nova.scheduler.client.report [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Deleted allocations for instance 859d632d-fb95-4ac6-9219-8768191979a5 [ 1575.624663] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5556061a-c6bf-4439-8cc7-18badee11106 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 572.137s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1575.626061] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 376.747s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.626353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.626505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.627325] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1575.628795] env[69227]: INFO nova.compute.manager [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Terminating instance [ 1575.630404] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquiring lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.630567] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Acquired lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1575.630717] env[69227]: DEBUG nova.network.neutron [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1575.647235] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c580ac27-635b-47a9-8184-8578465b7d10 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "859d632d-fb95-4ac6-9219-8768191979a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.856s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1575.648273] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "859d632d-fb95-4ac6-9219-8768191979a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 420.325s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.648500] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Acquiring lock "859d632d-fb95-4ac6-9219-8768191979a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1575.648699] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "859d632d-fb95-4ac6-9219-8768191979a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1575.648854] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "859d632d-fb95-4ac6-9219-8768191979a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1575.650480] env[69227]: INFO nova.compute.manager [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Terminating instance [ 1575.652032] env[69227]: DEBUG nova.compute.manager [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 
tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1575.652208] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1575.652962] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b0ecb919-a7cf-48ce-9b1b-1534a98c43b5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.662998] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf10784-b107-49d7-a445-e7d438d577b3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.689942] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 859d632d-fb95-4ac6-9219-8768191979a5 could not be found. [ 1575.690174] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1575.690351] env[69227]: INFO nova.compute.manager [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1575.690586] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1575.690814] env[69227]: DEBUG nova.compute.manager [-] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1575.690911] env[69227]: DEBUG nova.network.neutron [-] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1576.129608] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1576.151259] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1576.207862] env[69227]: DEBUG nova.network.neutron [-] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.576160] env[69227]: DEBUG nova.network.neutron [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Updating instance_info_cache with network_info: [{"id": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "address": "fa:16:3e:a6:3f:e5", "network": {"id": "fd4eb7ff-e68e-4292-b7e1-889944c7adeb", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.73", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "f9a97342f8234df5a8f3fca89b9f407b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17c8935c-c6", "ovs_interfaceid": "17c8935c-c67f-4c80-8da9-23a4db6c4ad8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.653777] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1576.654098] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1576.657316] env[69227]: INFO nova.compute.claims [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1576.672998] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "compute_resources" 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1576.709725] env[69227]: INFO nova.compute.manager [-] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] Took 1.02 seconds to deallocate network for instance. [ 1577.079504] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Releasing lock "refresh_cache-ecd508a6-185d-42ce-8bb7-f0e6173d6556" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1577.079935] env[69227]: DEBUG nova.compute.manager [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1577.080157] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1577.080467] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b38831c8-69bd-42b0-a3ba-b6441529de55 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.088779] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d5c57a3-1e97-4d78-92bf-3e1d75e8ee3e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.121850] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ecd508a6-185d-42ce-8bb7-f0e6173d6556 could not be found. [ 1577.121850] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1577.121850] env[69227]: INFO nova.compute.manager [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1577.121850] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1577.121850] env[69227]: DEBUG nova.compute.manager [-] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1577.121850] env[69227]: DEBUG nova.network.neutron [-] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1577.218659] env[69227]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1577.218895] env[69227]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [-] Dynamic interval looping call 'oslo_service.backend.eventlet.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall exception_handler_v20(status_code, error_body) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise client_exc(message=error_message, [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Neutron server returns request_ids: ['req-75950789-f979-43f7-968e-a9c6404b7f70'] [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall During handling of the above exception, another exception occurred: [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1577.219417] env[69227]: ERROR 
oslo.service.backend.eventlet.loopingcall result = func(*self.args, **self.kw) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = f(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._deallocate_network( [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self.network_api.deallocate_for_instance( [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall data = neutron.list_ports(**search_opts) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall for r in self._pagination(collection, path, **params): [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall res = self.get(path, params=params) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.retry_request("GET", action, body=body, [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.do_request(method, action, body=body, [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1577.219417] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1577.220798] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1577.220798] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1577.220798] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1577.220798] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 1577.220798] env[69227]: ERROR nova.compute.manager [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1577.724680] env[69227]: ERROR nova.compute.manager [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] exception_handler_v20(status_code, error_body) [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise client_exc(message=error_message, [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Neutron server returns request_ids: ['req-75950789-f979-43f7-968e-a9c6404b7f70'] [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] During handling of the above exception, another exception occurred: [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Traceback (most recent call last): [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._delete_instance(context, instance, bdms) [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._shutdown_instance(context, instance, bdms) [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._try_deallocate_network(context, instance, requested_networks) [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] with excutils.save_and_reraise_exception(): [ 1577.724680] env[69227]: ERROR 
nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self.force_reraise() [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise self.value [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] _deallocate_network_with_retries() [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return evt.wait() [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = hub.switch() [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.greenlet.switch() [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = func(*self.args, **self.kw) [ 1577.724680] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] result = f(*args, **kwargs) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._deallocate_network( [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self.network_api.deallocate_for_instance( [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1577.726184] env[69227]: 
ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] data = neutron.list_ports(**search_opts) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.list('ports', self.ports_path, retrieve_all, [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] for r in self._pagination(collection, path, **params): [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] res = self.get(path, params=params) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.retry_request("GET", action, body=body, [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] return self.do_request(method, action, body=body, [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] ret = obj(*args, **kwargs) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: 
ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] self._handle_fault_response(status_code, replybody, resp) [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1577.726184] env[69227]: ERROR nova.compute.manager [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] [ 1577.736065] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8be62836-37a0-44a6-9ee6-1cc21bd71468 tempest-ServersAdminTestJSON-1566599615 tempest-ServersAdminTestJSON-1566599615-project-member] Lock "859d632d-fb95-4ac6-9219-8768191979a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.088s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1577.737314] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "859d632d-fb95-4ac6-9219-8768191979a5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 405.680s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1577.737500] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 859d632d-fb95-4ac6-9219-8768191979a5] During sync_power_state the instance has a pending task (deleting). Skip. 
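The 401 responses above are what turn the terminate path into a failure: nova-compute raises NeutronAdminCredentialConfigurationInvalid while trying to deallocate the instance's ports, so the instance is pushed to vm_state ERROR instead of being deleted cleanly, and the log itself says to verify the Neutron admin credentials in nova.conf. As a minimal sketch only (keystoneauth1 and python-neutronclient assumed installed; all endpoint, user, password and project values below are placeholders, not values taken from this log), the same list_ports call that fails in the traceback can be replayed outside Nova, roughly the way Nova builds its Neutron session, to check whether the configured service credentials still authenticate against Keystone:

from keystoneauth1 import loading, session
from neutronclient.v2_0 import client
from neutronclient.common import exceptions as neutron_exc

# Placeholder values -- substitute the settings from the [neutron] section of nova.conf.
AUTH_URL = "http://keystone.example.test/identity/v3"   # assumed endpoint, not from this log
USERNAME = "nova"                                        # assumed service user name
PASSWORD = "secret"                                      # assumed password
PROJECT_NAME = "service"
USER_DOMAIN_NAME = "Default"
PROJECT_DOMAIN_NAME = "Default"

# Build a password-auth session with keystoneauth1, similar in spirit to what Nova does
# before handing the session to python-neutronclient.
loader = loading.get_plugin_loader("password")
auth = loader.load_from_options(
    auth_url=AUTH_URL,
    username=USERNAME,
    password=PASSWORD,
    project_name=PROJECT_NAME,
    user_domain_name=USER_DOMAIN_NAME,
    project_domain_name=PROJECT_DOMAIN_NAME,
)
sess = session.Session(auth=auth)
neutron = client.Client(session=sess)

try:
    # list_ports() is the call that raised the 401 in the traceback above;
    # filtering on device_id mirrors the per-instance port lookup.
    ports = neutron.list_ports(device_id="ecd508a6-185d-42ce-8bb7-f0e6173d6556")
    print("authenticated OK, %d ports returned" % len(ports.get("ports", [])))
except neutron_exc.Unauthorized as exc:
    print("credentials rejected by Keystone/Neutron: %s" % exc)

If this standalone check also gets a 401, the credentials in nova.conf (or the Keystone service user itself) are the problem rather than anything in the deallocation path shown above.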
[ 1577.737677] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "859d632d-fb95-4ac6-9219-8768191979a5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1577.810738] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a127a2-f4bf-4b07-85ab-501a6cda7c4b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.818066] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee41ebb-8d5d-42e7-9c76-295595b32911 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.848643] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562561ee-a95e-44b1-85bc-8b398a047f72 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.855387] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e837c8d8-3001-413f-8c75-7aad454d4011 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.868053] env[69227]: DEBUG nova.compute.provider_tree [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.231846] env[69227]: DEBUG oslo_concurrency.lockutils [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Lock "ecd508a6-185d-42ce-8bb7-f0e6173d6556" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.606s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1578.370452] env[69227]: DEBUG nova.scheduler.client.report [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1578.875681] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1578.876292] env[69227]: DEBUG nova.compute.manager [None 
req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1578.878791] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.206s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1578.881138] env[69227]: INFO nova.compute.claims [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1579.385415] env[69227]: DEBUG nova.compute.utils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1579.388513] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1579.388681] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1579.424410] env[69227]: DEBUG nova.policy [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e5011af3fae4879ac12a1691b238d8c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e914649489446e2a10d1705566da39b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1579.427387] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1579.739391] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Successfully created port: d68be91b-b0d6-44fd-ab0f-581cf06c3641 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1579.742074] env[69227]: INFO nova.compute.manager [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 
tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] [instance: ecd508a6-185d-42ce-8bb7-f0e6173d6556] Successfully reverted task state from None on failure for instance. [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server [None req-32b1db4b-88c3-44a0-9dc3-41a72cdcfc75 tempest-MigrationsAdminTest-483302050 tempest-MigrationsAdminTest-483302050-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-75950789-f979-43f7-968e-a9c6404b7f70'] [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 1579.745800] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server 
self.force_reraise() [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1579.750261] env[69227]: 
ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1579.750261] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1579.751897] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1579.751897] env[69227]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1579.751897] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1579.751897] env[69227]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1579.751897] env[69227]: ERROR oslo_messaging.rpc.server 
nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1579.751897] env[69227]: ERROR oslo_messaging.rpc.server [ 1579.889829] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1580.071926] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b5d58c-ea2a-43fd-a185-471d42b9c1ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.079201] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d512d6-2e45-4687-8455-3875a125f9f2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.111660] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c0829a-c297-411b-9fd3-7ba86b7ab415 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.118906] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2e8975-56e3-4ddd-ab7f-9f8ea62d9d0a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.131889] env[69227]: DEBUG nova.compute.provider_tree [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.635507] env[69227]: DEBUG nova.scheduler.client.report [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1580.902053] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1580.926666] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1580.926927] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1580.927109] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1580.927297] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1580.927444] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1580.927591] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1580.927799] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1580.927958] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1580.928179] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 
tempest-ImagesTestJSON-2129746614-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1580.928348] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1580.928522] env[69227]: DEBUG nova.virt.hardware [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1580.929382] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b1160d7-0fcf-4a23-a854-1760eb32b94f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.937063] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5907be1-8d52-40ba-a732-e9a2903664ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.140826] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.262s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1581.141364] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1581.253667] env[69227]: DEBUG nova.compute.manager [req-cea819a1-b8fc-45b2-8863-79e0fd7f3dc1 req-62592d51-95b0-4d08-b25a-4cd10b1705a0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Received event network-vif-plugged-d68be91b-b0d6-44fd-ab0f-581cf06c3641 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1581.253921] env[69227]: DEBUG oslo_concurrency.lockutils [req-cea819a1-b8fc-45b2-8863-79e0fd7f3dc1 req-62592d51-95b0-4d08-b25a-4cd10b1705a0 service nova] Acquiring lock "1397d96c-8a1d-4940-9b58-148435f12497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1581.254242] env[69227]: DEBUG oslo_concurrency.lockutils [req-cea819a1-b8fc-45b2-8863-79e0fd7f3dc1 req-62592d51-95b0-4d08-b25a-4cd10b1705a0 service nova] Lock "1397d96c-8a1d-4940-9b58-148435f12497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1581.254414] env[69227]: DEBUG oslo_concurrency.lockutils [req-cea819a1-b8fc-45b2-8863-79e0fd7f3dc1 req-62592d51-95b0-4d08-b25a-4cd10b1705a0 service nova] Lock "1397d96c-8a1d-4940-9b58-148435f12497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1581.254585] env[69227]: DEBUG nova.compute.manager [req-cea819a1-b8fc-45b2-8863-79e0fd7f3dc1 req-62592d51-95b0-4d08-b25a-4cd10b1705a0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] No waiting events found dispatching network-vif-plugged-d68be91b-b0d6-44fd-ab0f-581cf06c3641 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1581.254750] env[69227]: WARNING nova.compute.manager [req-cea819a1-b8fc-45b2-8863-79e0fd7f3dc1 req-62592d51-95b0-4d08-b25a-4cd10b1705a0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Received unexpected event network-vif-plugged-d68be91b-b0d6-44fd-ab0f-581cf06c3641 for instance with vm_state building and task_state spawning. [ 1581.331880] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Successfully updated port: d68be91b-b0d6-44fd-ab0f-581cf06c3641 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1581.646014] env[69227]: DEBUG nova.compute.utils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.647429] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1581.647600] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1581.680982] env[69227]: DEBUG nova.policy [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4163297ae024487943a604b9fd2a71f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dd89399a014fbea28c0afc4d6da8f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1581.834226] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "refresh_cache-1397d96c-8a1d-4940-9b58-148435f12497" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.834380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquired lock "refresh_cache-1397d96c-8a1d-4940-9b58-148435f12497" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1581.834532] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1582.002499] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Successfully created port: 249a467a-7974-4ff3-9e83-6a9233ad610c {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1582.151331] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1582.368930] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1582.426516] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1582.488211] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Updating instance_info_cache with network_info: [{"id": "d68be91b-b0d6-44fd-ab0f-581cf06c3641", "address": "fa:16:3e:1b:2d:98", "network": {"id": "2706d778-10f3-47bd-854c-b9eed02830f1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1781762304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e914649489446e2a10d1705566da39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd68be91b-b0", "ovs_interfaceid": "d68be91b-b0d6-44fd-ab0f-581cf06c3641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.990740] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Releasing lock "refresh_cache-1397d96c-8a1d-4940-9b58-148435f12497" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1582.991082] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance network_info: |[{"id": "d68be91b-b0d6-44fd-ab0f-581cf06c3641", "address": "fa:16:3e:1b:2d:98", "network": {"id": "2706d778-10f3-47bd-854c-b9eed02830f1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1781762304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e914649489446e2a10d1705566da39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd68be91b-b0", "ovs_interfaceid": 
"d68be91b-b0d6-44fd-ab0f-581cf06c3641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1582.991501] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:2d:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '365ac5b1-6d83-4dfe-887f-60574d7f6124', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd68be91b-b0d6-44fd-ab0f-581cf06c3641', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1582.999106] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Creating folder: Project (0e914649489446e2a10d1705566da39b). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1582.999376] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c7233882-cf58-46ff-b64f-b5423ff538ac {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.009376] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Created folder: Project (0e914649489446e2a10d1705566da39b) in parent group-v694623. [ 1583.009656] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Creating folder: Instances. Parent ref: group-v694709. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1583.009759] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a6dbbf0-3653-4d56-82dc-78e1c69f85ca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.018609] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Created folder: Instances in parent group-v694709. [ 1583.018854] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1583.019077] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1583.019271] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67ea576e-4ad9-4168-99a7-03ee6540e381 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.038704] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1583.038704] env[69227]: value = "task-3475121" [ 1583.038704] env[69227]: _type = "Task" [ 1583.038704] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.046129] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475121, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.159617] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1583.184792] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1583.185117] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1583.185315] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1583.185525] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1583.185688] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a 
tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1583.185919] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1583.186191] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1583.186364] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1583.186556] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1583.186755] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1583.186929] env[69227]: DEBUG nova.virt.hardware [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1583.187814] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7e9af58-c16c-484d-bb4e-7dc02cffee1e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.195583] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f23017-898c-4db7-b122-5227aa4aab50 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.281118] env[69227]: DEBUG nova.compute.manager [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Received event network-changed-d68be91b-b0d6-44fd-ab0f-581cf06c3641 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1583.281318] env[69227]: DEBUG nova.compute.manager [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Refreshing instance network info cache due to event network-changed-d68be91b-b0d6-44fd-ab0f-581cf06c3641. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1583.281535] env[69227]: DEBUG oslo_concurrency.lockutils [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] Acquiring lock "refresh_cache-1397d96c-8a1d-4940-9b58-148435f12497" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.281676] env[69227]: DEBUG oslo_concurrency.lockutils [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] Acquired lock "refresh_cache-1397d96c-8a1d-4940-9b58-148435f12497" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1583.281837] env[69227]: DEBUG nova.network.neutron [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Refreshing network info cache for port d68be91b-b0d6-44fd-ab0f-581cf06c3641 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1583.548408] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475121, 'name': CreateVM_Task, 'duration_secs': 0.302758} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.551024] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1583.551024] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.551024] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1583.551024] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1583.551024] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3807013e-2cf4-4463-b42d-101304f2cc2b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.555087] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Waiting for the task: (returnval){ [ 1583.555087] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52c9063e-c118-aee0-9abd-4050770b6a3c" [ 1583.555087] env[69227]: _type = "Task" [ 1583.555087] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.563769] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52c9063e-c118-aee0-9abd-4050770b6a3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.580208] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Successfully updated port: 249a467a-7974-4ff3-9e83-6a9233ad610c {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.954582] env[69227]: DEBUG nova.network.neutron [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Updated VIF entry in instance network info cache for port d68be91b-b0d6-44fd-ab0f-581cf06c3641. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1583.954944] env[69227]: DEBUG nova.network.neutron [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Updating instance_info_cache with network_info: [{"id": "d68be91b-b0d6-44fd-ab0f-581cf06c3641", "address": "fa:16:3e:1b:2d:98", "network": {"id": "2706d778-10f3-47bd-854c-b9eed02830f1", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1781762304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e914649489446e2a10d1705566da39b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "365ac5b1-6d83-4dfe-887f-60574d7f6124", "external-id": "nsx-vlan-transportzone-138", "segmentation_id": 138, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd68be91b-b0", "ovs_interfaceid": "d68be91b-b0d6-44fd-ab0f-581cf06c3641", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.064768] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1584.065073] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1584.065292] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.082877] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "refresh_cache-dcabb6a4-2b08-47df-8687-18431ee85153" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.083018] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "refresh_cache-dcabb6a4-2b08-47df-8687-18431ee85153" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1584.083163] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1584.458064] env[69227]: DEBUG oslo_concurrency.lockutils [req-de0cde9b-f572-4385-9aff-eb2cb7a4504f req-bd267f1a-350f-47bc-b9ce-f137536349c0 service nova] Releasing lock "refresh_cache-1397d96c-8a1d-4940-9b58-148435f12497" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1584.612296] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1584.767705] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Updating instance_info_cache with network_info: [{"id": "249a467a-7974-4ff3-9e83-6a9233ad610c", "address": "fa:16:3e:a1:08:13", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249a467a-79", "ovs_interfaceid": "249a467a-7974-4ff3-9e83-6a9233ad610c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.270933] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "refresh_cache-dcabb6a4-2b08-47df-8687-18431ee85153" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1585.270933] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance network_info: |[{"id": "249a467a-7974-4ff3-9e83-6a9233ad610c", "address": "fa:16:3e:a1:08:13", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249a467a-79", "ovs_interfaceid": "249a467a-7974-4ff3-9e83-6a9233ad610c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1585.271397] env[69227]: 
DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a1:08:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '249a467a-7974-4ff3-9e83-6a9233ad610c', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.278574] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating folder: Project (52dd89399a014fbea28c0afc4d6da8f5). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1585.278842] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f062184-2614-48df-a715-14a9a098a3c6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.289301] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Created folder: Project (52dd89399a014fbea28c0afc4d6da8f5) in parent group-v694623. [ 1585.289528] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating folder: Instances. Parent ref: group-v694712. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1585.289790] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ea6fec74-32be-4c51-9459-c4163a57a7c6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.298271] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Created folder: Instances in parent group-v694712. [ 1585.298499] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1585.298686] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1585.298907] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a9a268a-59a0-4f9a-b7d0-544c8ef4e184 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.317397] env[69227]: DEBUG nova.compute.manager [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Received event network-vif-plugged-249a467a-7974-4ff3-9e83-6a9233ad610c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1585.317705] env[69227]: DEBUG oslo_concurrency.lockutils [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] Acquiring lock "dcabb6a4-2b08-47df-8687-18431ee85153-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1585.317807] env[69227]: DEBUG oslo_concurrency.lockutils [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] Lock "dcabb6a4-2b08-47df-8687-18431ee85153-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1585.317968] env[69227]: DEBUG oslo_concurrency.lockutils [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] Lock "dcabb6a4-2b08-47df-8687-18431ee85153-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1585.318176] env[69227]: DEBUG nova.compute.manager [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] No waiting events found dispatching network-vif-plugged-249a467a-7974-4ff3-9e83-6a9233ad610c {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1585.318343] env[69227]: WARNING nova.compute.manager [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Received unexpected event network-vif-plugged-249a467a-7974-4ff3-9e83-6a9233ad610c for instance with vm_state building and task_state spawning. [ 1585.318498] env[69227]: DEBUG nova.compute.manager [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Received event network-changed-249a467a-7974-4ff3-9e83-6a9233ad610c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1585.318648] env[69227]: DEBUG nova.compute.manager [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Refreshing instance network info cache due to event network-changed-249a467a-7974-4ff3-9e83-6a9233ad610c. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1585.318841] env[69227]: DEBUG oslo_concurrency.lockutils [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] Acquiring lock "refresh_cache-dcabb6a4-2b08-47df-8687-18431ee85153" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.318950] env[69227]: DEBUG oslo_concurrency.lockutils [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] Acquired lock "refresh_cache-dcabb6a4-2b08-47df-8687-18431ee85153" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1585.319114] env[69227]: DEBUG nova.network.neutron [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Refreshing network info cache for port 249a467a-7974-4ff3-9e83-6a9233ad610c {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1585.324647] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.324647] env[69227]: value = "task-3475124" [ 1585.324647] env[69227]: _type = "Task" [ 1585.324647] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.332822] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475124, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.427186] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1585.835435] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475124, 'name': CreateVM_Task, 'duration_secs': 0.292519} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.835671] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1585.836390] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.836552] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1585.836903] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1585.837166] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebcb784e-c985-4056-bab3-99c86b2e552f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.841775] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 1585.841775] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52fa20be-ecae-d364-07c9-b5e51db3bec1" [ 1585.841775] env[69227]: _type = "Task" [ 1585.841775] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.851286] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52fa20be-ecae-d364-07c9-b5e51db3bec1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.005851] env[69227]: DEBUG nova.network.neutron [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Updated VIF entry in instance network info cache for port 249a467a-7974-4ff3-9e83-6a9233ad610c. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1586.006365] env[69227]: DEBUG nova.network.neutron [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Updating instance_info_cache with network_info: [{"id": "249a467a-7974-4ff3-9e83-6a9233ad610c", "address": "fa:16:3e:a1:08:13", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap249a467a-79", "ovs_interfaceid": "249a467a-7974-4ff3-9e83-6a9233ad610c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1586.352296] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1586.352619] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1586.352762] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.422569] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1586.426207] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1586.426391] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1586.426546] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1586.508609] env[69227]: DEBUG oslo_concurrency.lockutils [req-ffbfdfbb-32cf-4207-a61a-eab8b5b45782 req-d937aa36-a54a-487f-9982-c56b555d1258 service nova] Releasing lock "refresh_cache-dcabb6a4-2b08-47df-8687-18431ee85153" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1586.929587] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1586.929857] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1586.930056] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1586.930224] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1586.931111] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c44cce-9d56-4b79-8e7c-50d1617bdcb4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.939937] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a151d5d-d617-4a4e-8b64-49270697a227 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.953629] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4bf558b-33e3-49e6-8d0e-088c21e6bfac {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.960189] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd73ee9-ab82-43b4-a455-23c2f600bfa1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.988383] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180972MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1586.988522] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1586.988722] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.019645] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1588.522563] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 40a459c7-657d-40db-aa78-d16af085a3ee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1589.026394] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1589.026731] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1589.026731] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1589.163921] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5427d1a-c343-46c0-85bf-6458e8b1fcd8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.171530] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f013e0f1-3a15-408b-89db-eaebbc18ed70 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.201128] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048f5ac7-311f-49b2-8514-8253a7796a2e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.207836] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a60bc7-364a-4273-8993-e95f38c1156f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.220829] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1589.724058] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1590.229959] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1590.230246] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.241s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1591.231024] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.231390] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1591.231390] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1591.736206] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.736354] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.736485] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.736610] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.736730] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.736858] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.736991] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.737180] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.737308] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.737426] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1591.737545] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1591.737756] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1591.737896] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1608.882087] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "53fae914-75b0-414e-b3ce-9d8be3462039" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1608.882418] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "53fae914-75b0-414e-b3ce-9d8be3462039" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1612.604149] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "9944282c-d21a-40b2-9143-f76c288860ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1612.604594] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "9944282c-d21a-40b2-9143-f76c288860ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1621.894025] env[69227]: WARNING oslo_vmware.rw_handles [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1621.894025] env[69227]: ERROR 
oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1621.894025] env[69227]: ERROR oslo_vmware.rw_handles [ 1621.894025] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1621.895911] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1621.896218] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Copying Virtual Disk [datastore2] vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/39471112-ef9a-4da0-a6d9-e8a557da2c4e/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1621.896506] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0e33a62-f09a-404d-b87a-0f67d6760ca3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.904807] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Waiting for the task: (returnval){ [ 1621.904807] env[69227]: value = "task-3475128" [ 1621.904807] env[69227]: _type = "Task" [ 1621.904807] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.912326] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Task: {'id': task-3475128, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.415320] env[69227]: DEBUG oslo_vmware.exceptions [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1622.415623] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1622.416219] env[69227]: ERROR nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.416219] env[69227]: Faults: ['InvalidArgument'] [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Traceback (most recent call last): [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] yield resources [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self.driver.spawn(context, instance, image_meta, [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self._fetch_image_if_missing(context, vi) [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] image_cache(vi, tmp_image_ds_loc) [ 1622.416219] env[69227]: 
ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] vm_util.copy_virtual_disk( [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] session._wait_for_task(vmdk_copy_task) [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] return self.wait_for_task(task_ref) [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] return evt.wait() [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] result = hub.switch() [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] return self.greenlet.switch() [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self.f(*self.args, **self.kw) [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] raise exceptions.translate_fault(task_info.error) [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Faults: ['InvalidArgument'] [ 1622.416219] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] [ 1622.417316] env[69227]: INFO nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Terminating instance [ 1622.418561] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] 
Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1622.418809] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.419427] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1622.419619] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1622.419838] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fb5a1f8-1e54-4876-b586-95a3e3bf5127 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.422066] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4e4737-987f-4da1-adb3-460c91db19a4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.428586] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1622.428788] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d5776e8-0df7-4555-b971-6b02da01efd5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.430807] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.430981] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1622.431936] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-234866fa-d87d-456e-a927-46ba8beaf496 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.436375] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Waiting for the task: (returnval){ [ 1622.436375] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]526cf4b6-165c-b3e3-d5da-bfe9bab71171" [ 1622.436375] env[69227]: _type = "Task" [ 1622.436375] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.444272] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]526cf4b6-165c-b3e3-d5da-bfe9bab71171, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.499331] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1622.499555] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1622.499742] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Deleting the datastore file [datastore2] d39f7ea0-82f7-490b-94cf-1c3c19806c7f {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1622.500085] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa7812d3-ff08-4ef9-96f7-102f958ccd6f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.507440] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Waiting for the task: (returnval){ [ 1622.507440] env[69227]: value = "task-3475130" [ 1622.507440] env[69227]: _type = "Task" [ 1622.507440] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.516783] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Task: {'id': task-3475130, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.948102] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1622.948102] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Creating directory with path [datastore2] vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1622.948102] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-84206743-1ed6-449d-89c3-55c1e003416f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.969047] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Created directory with path [datastore2] vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1622.969282] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Fetch image to [datastore2] vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1622.969487] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1622.970345] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e04e4dc-8538-48bd-b1c1-05b0734eb8bc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.977677] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13d735a-73b5-43c0-9d75-291f953ffda2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.987073] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74275e1-208e-4652-8320-f6b1bd7e9049 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.022988] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb9e4937-7090-4b67-91af-286ca9f7d749 {{(pid=69227) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.031999] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-345613e9-e700-4e67-a0c7-8536425ecc00 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.033933] env[69227]: DEBUG oslo_vmware.api [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Task: {'id': task-3475130, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100639} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.034199] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1623.034421] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1623.034602] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1623.034772] env[69227]: INFO nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Took 0.62 seconds to destroy the instance on the hypervisor. 
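The records above show how the vmwareapi driver drives vCenter through asynchronous tasks: it invokes CopyVirtualDisk_Task or DeleteDatastoreFile_Task and then wait_for_task/_poll_task in oslo_vmware/api.py report "progress is 0%." until the task either completes (DeleteDatastoreFile_Task finishes with duration_secs: 0.100639) or ends in a fault that is translated and raised (the InvalidArgument "fileType" failure that aborted the spawn of d39f7ea0-82f7-490b-94cf-1c3c19806c7f). As a rough sketch of that polling shape only, not oslo_vmware's actual implementation, with get_task_info and TaskFault as hypothetical stand-ins:

    import time

    class TaskFault(Exception):
        """Stand-in for a translated VIM fault such as InvalidArgument."""

    QUEUED, RUNNING, SUCCESS = "queued", "running", "success"

    def wait_for_remote_task(get_task_info, poll_interval=0.5):
        """Poll a remote task until it finishes.

        get_task_info is a hypothetical callable returning an object with
        .state and .error attributes, loosely mirroring the TaskInfo that
        the log's _poll_task reads back from vCenter.
        """
        while True:
            info = get_task_info()
            if info.state in (QUEUED, RUNNING):
                # This branch produces the "progress is 0%." style records above.
                time.sleep(poll_interval)
                continue
            if info.state == SUCCESS:
                return info  # e.g. DeleteDatastoreFile_Task, duration_secs: 0.100639
            # Any other terminal state carries an error, e.g.
            # "A specified parameter was not correct: fileType" (InvalidArgument).
            raise TaskFault(info.error)

The point the log reflects is that the poller does not interpret the failure itself: it hands task_info.error to a fault-translation step ("Fault InvalidArgument not matched.") and lets the caller, here vm_util.copy_virtual_disk and ultimately the spawn path, decide how to clean up.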
[ 1623.037365] env[69227]: DEBUG nova.compute.claims [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1623.037531] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1623.037796] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1623.062864] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1623.112331] env[69227]: DEBUG oslo_vmware.rw_handles [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1623.175195] env[69227]: DEBUG oslo_vmware.rw_handles [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1623.175598] env[69227]: DEBUG oslo_vmware.rw_handles [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1623.693178] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a02d87b3-af79-42e3-8b44-b3393ede3740 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.700697] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98df64b0-644d-4e7a-bef9-e63f350ba97d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.729759] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5bf35c-462c-44a6-952d-353ddf7971f6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.736349] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8114cd44-5e12-478e-bd5d-106783075697 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.749703] env[69227]: DEBUG nova.compute.provider_tree [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.253223] env[69227]: DEBUG nova.scheduler.client.report [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1624.757539] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.719s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1624.758164] env[69227]: ERROR nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1624.758164] env[69227]: Faults: ['InvalidArgument'] [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Traceback (most recent call last): [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/compute/manager.py", line 
2641, in _build_and_run_instance [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self.driver.spawn(context, instance, image_meta, [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self._fetch_image_if_missing(context, vi) [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] image_cache(vi, tmp_image_ds_loc) [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] vm_util.copy_virtual_disk( [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] session._wait_for_task(vmdk_copy_task) [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] return self.wait_for_task(task_ref) [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] return evt.wait() [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] result = hub.switch() [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] return self.greenlet.switch() [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] self.f(*self.args, **self.kw) [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: 
d39f7ea0-82f7-490b-94cf-1c3c19806c7f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] raise exceptions.translate_fault(task_info.error) [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Faults: ['InvalidArgument'] [ 1624.758164] env[69227]: ERROR nova.compute.manager [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] [ 1624.759178] env[69227]: DEBUG nova.compute.utils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1624.760693] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Build of instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f was re-scheduled: A specified parameter was not correct: fileType [ 1624.760693] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1624.761118] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1624.761353] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1624.761551] env[69227]: DEBUG nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1624.761784] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1625.541132] env[69227]: DEBUG nova.network.neutron [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.044491] env[69227]: INFO nova.compute.manager [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Took 1.28 seconds to deallocate network for instance. [ 1627.075512] env[69227]: INFO nova.scheduler.client.report [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Deleted allocations for instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f [ 1627.585761] env[69227]: DEBUG oslo_concurrency.lockutils [None req-528b596b-f660-4c64-9478-a17e5fe91517 tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 413.431s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1627.587053] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 216.989s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1627.587302] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Acquiring lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1627.587509] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] 
Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1627.587675] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1627.590149] env[69227]: INFO nova.compute.manager [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Terminating instance [ 1627.591746] env[69227]: DEBUG nova.compute.manager [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1627.591943] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1627.592386] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c35da33-3c19-4b8d-82f9-dc99e149f847 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.601591] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc23ac13-8135-4a43-81ce-8fdb863e8623 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.630015] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d39f7ea0-82f7-490b-94cf-1c3c19806c7f could not be found. [ 1627.630178] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1627.630354] env[69227]: INFO nova.compute.manager [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Took 0.04 seconds to destroy the instance on the hypervisor. 
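Nearly every step in this section is bracketed by oslo.concurrency lock records: 'Acquiring lock "X" by "Y"', then 'acquired :: waited N.NNNs', then '"released" :: held N.NNNs' (the shared "compute_resources" lock, the per-instance UUID lock, and the "<uuid>-events" lock just above). A minimal sketch of that wait/hold instrumentation pattern, written against the standard library rather than oslo.concurrency itself, with timed_lock as an illustrative name:

    import logging
    import threading
    import time
    from contextlib import contextmanager

    LOG = logging.getLogger(__name__)
    _locks = {}  # name -> threading.Lock, created on first use

    @contextmanager
    def timed_lock(name, owner):
        """Acquire a named lock and log how long we waited for and then held it,
        mimicking the 'waited N.NNNs' / 'held N.NNNs' records in this log."""
        lock = _locks.setdefault(name, threading.Lock())
        LOG.debug('Acquiring lock "%s" by "%s"', name, owner)
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, owner, acquired - start)
        try:
            yield
        finally:
            lock.release()
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, owner, time.monotonic() - acquired)

    # Usage (names taken from the records above):
    # with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    #     ...claim resources...

The held/waited figures in the log, for example the build lock on d39f7ea0-82f7-490b-94cf-1c3c19806c7f held 413.431s while the terminate request waited 216.989s behind it, come from exactly this kind of bookkeeping around acquire and release.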
[ 1627.630587] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1627.630804] env[69227]: DEBUG nova.compute.manager [-] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1627.630899] env[69227]: DEBUG nova.network.neutron [-] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1628.093176] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1628.149656] env[69227]: DEBUG nova.network.neutron [-] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.616007] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1628.616704] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1628.617876] env[69227]: INFO nova.compute.claims [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.651597] env[69227]: INFO nova.compute.manager [-] [instance: d39f7ea0-82f7-490b-94cf-1c3c19806c7f] Took 1.02 seconds to deallocate network for instance. 
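The "Claim successful on node domain-c8..." record above and the repeated inventory dumps for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b fit together through Placement's capacity rule: each resource class can be consumed up to (total - reserved) * allocation_ratio, so this node offers (48 - 0) * 4.0 = 192 schedulable VCPU, (196590 - 512) * 1.0 = 196078 MB of RAM and 400 GB of disk, while the ten instances listed earlier hold 1 VCPU / 192 MB / 1 GB each. A small worked sketch with those numbers (claim_fits and the dictionaries are illustrative, not Nova's code, and min_unit/max_unit/step_size are ignored for brevity):

    def capacity(inv):
        """Schedulable capacity of one resource class: (total - reserved) * allocation_ratio."""
        return (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]

    def claim_fits(inventory, used, requested):
        """True if every requested amount still fits under the provider's capacity."""
        return all(used.get(rc, 0) + amount <= capacity(inventory[rc])
                   for rc, amount in requested.items())

    inventory = {  # figures from the provider 30ebb745-... inventory records above
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},  # capacity 192
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},  # capacity 196078
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},  # capacity 400
    }
    used = {"VCPU": 10, "MEMORY_MB": 1920, "DISK_GB": 10}   # ten instances at 1 VCPU / 192 MB / 1 GB
    request = {"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1}   # the m1.nano claim for 40a459c7-...
    print(claim_fits(inventory, used, request))             # True -> "Claim successful on node ..."

Adding one more m1.nano on top of the ten existing allocations clearly fits under 192 VCPU, 196078 MB and 400 GB of capacity, which is why the claim for 40a459c7-657d-40db-aa78-d16af085a3ee succeeds; the same used figures also match the final resource view reported earlier (used_vcpus=10, used_disk=10GB, used_ram=2432MB once the 512 MB reservation is included).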
[ 1628.856801] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1629.676424] env[69227]: DEBUG oslo_concurrency.lockutils [None req-aa9b60e7-d5be-4370-912b-0ceb2bbc451c tempest-ServerAddressesNegativeTestJSON-412600784 tempest-ServerAddressesNegativeTestJSON-412600784-project-member] Lock "d39f7ea0-82f7-490b-94cf-1c3c19806c7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.089s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1629.789701] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199241e8-f4a9-4d56-a789-4716ae21f327 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.798357] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f198079-b459-42e3-8272-05e53df381b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.829575] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8965bf01-c261-46a6-9a7e-c6709e41f0db {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.836869] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5635137-2ea4-406d-b87f-00bd5e38037f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.850794] env[69227]: DEBUG nova.compute.provider_tree [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1630.354099] env[69227]: DEBUG nova.scheduler.client.report [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1630.859337] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.243s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1630.859651] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1631.364430] env[69227]: DEBUG nova.compute.utils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1631.365779] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1631.365954] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1631.408493] env[69227]: DEBUG nova.policy [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffb2e003bb4247aeb8a9c8ce9c7f13cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '991c407526cf4eb7abcde2911220437e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1631.613320] env[69227]: DEBUG oslo_concurrency.lockutils [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "1397d96c-8a1d-4940-9b58-148435f12497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1631.649993] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Successfully created port: 70dfabcf-f64c-4c41-951d-b872f007c997 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1631.871745] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1632.880297] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1632.907342] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1632.907570] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1632.907722] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1632.907901] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1632.908058] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1632.908208] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1632.909028] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1632.909028] env[69227]: 
DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1632.909028] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1632.909028] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1632.909301] env[69227]: DEBUG nova.virt.hardware [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1632.910014] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb60ae6-c4e9-4c44-946f-92ce463458ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.918067] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a5261d-cc4d-45fe-8695-c173bcb4a34d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.149297] env[69227]: DEBUG nova.compute.manager [req-578cf0e2-30fa-4650-956e-c0ea7ffd3c57 req-10a6a776-eaa1-4ffe-b0f5-61bc1325e1be service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Received event network-vif-plugged-70dfabcf-f64c-4c41-951d-b872f007c997 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1633.149297] env[69227]: DEBUG oslo_concurrency.lockutils [req-578cf0e2-30fa-4650-956e-c0ea7ffd3c57 req-10a6a776-eaa1-4ffe-b0f5-61bc1325e1be service nova] Acquiring lock "40a459c7-657d-40db-aa78-d16af085a3ee-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1633.149297] env[69227]: DEBUG oslo_concurrency.lockutils [req-578cf0e2-30fa-4650-956e-c0ea7ffd3c57 req-10a6a776-eaa1-4ffe-b0f5-61bc1325e1be service nova] Lock "40a459c7-657d-40db-aa78-d16af085a3ee-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1633.149297] env[69227]: DEBUG oslo_concurrency.lockutils [req-578cf0e2-30fa-4650-956e-c0ea7ffd3c57 req-10a6a776-eaa1-4ffe-b0f5-61bc1325e1be service nova] Lock "40a459c7-657d-40db-aa78-d16af085a3ee-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1633.149297] env[69227]: DEBUG nova.compute.manager [req-578cf0e2-30fa-4650-956e-c0ea7ffd3c57 
req-10a6a776-eaa1-4ffe-b0f5-61bc1325e1be service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] No waiting events found dispatching network-vif-plugged-70dfabcf-f64c-4c41-951d-b872f007c997 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1633.149297] env[69227]: WARNING nova.compute.manager [req-578cf0e2-30fa-4650-956e-c0ea7ffd3c57 req-10a6a776-eaa1-4ffe-b0f5-61bc1325e1be service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Received unexpected event network-vif-plugged-70dfabcf-f64c-4c41-951d-b872f007c997 for instance with vm_state building and task_state spawning. [ 1633.239281] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Successfully updated port: 70dfabcf-f64c-4c41-951d-b872f007c997 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1633.726912] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "dcabb6a4-2b08-47df-8687-18431ee85153" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1633.743132] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "refresh_cache-40a459c7-657d-40db-aa78-d16af085a3ee" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.743250] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "refresh_cache-40a459c7-657d-40db-aa78-d16af085a3ee" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1633.743306] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1634.273968] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1634.391884] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Updating instance_info_cache with network_info: [{"id": "70dfabcf-f64c-4c41-951d-b872f007c997", "address": "fa:16:3e:26:74:15", "network": {"id": "dcee9cd9-fefe-4c4c-8275-56d50b0ee143", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1722961603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "991c407526cf4eb7abcde2911220437e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dfabcf-f6", "ovs_interfaceid": "70dfabcf-f64c-4c41-951d-b872f007c997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1634.894319] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "refresh_cache-40a459c7-657d-40db-aa78-d16af085a3ee" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1634.894631] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance network_info: |[{"id": "70dfabcf-f64c-4c41-951d-b872f007c997", "address": "fa:16:3e:26:74:15", "network": {"id": "dcee9cd9-fefe-4c4c-8275-56d50b0ee143", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1722961603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "991c407526cf4eb7abcde2911220437e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dfabcf-f6", "ovs_interfaceid": "70dfabcf-f64c-4c41-951d-b872f007c997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1634.895110] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:74:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ace50835-5731-4c77-b6c0-3076d7b4aa21', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70dfabcf-f64c-4c41-951d-b872f007c997', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1634.902952] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating folder: Project (991c407526cf4eb7abcde2911220437e). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1634.903227] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3b7c2226-6395-456b-8841-02c3586cfb4d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.914605] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Created folder: Project (991c407526cf4eb7abcde2911220437e) in parent group-v694623. [ 1634.914781] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating folder: Instances. Parent ref: group-v694719. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1634.915048] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-67204688-929f-439f-b45d-d516b05b78fc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.923908] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Created folder: Instances in parent group-v694719. [ 1634.924146] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1634.924326] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1634.924516] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-859cbac9-c3c9-4628-80be-e0d0ec94ec5a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.943903] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1634.943903] env[69227]: value = "task-3475139" [ 1634.943903] env[69227]: _type = "Task" [ 1634.943903] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.951264] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475139, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.178417] env[69227]: DEBUG nova.compute.manager [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Received event network-changed-70dfabcf-f64c-4c41-951d-b872f007c997 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1635.178417] env[69227]: DEBUG nova.compute.manager [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Refreshing instance network info cache due to event network-changed-70dfabcf-f64c-4c41-951d-b872f007c997. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1635.178417] env[69227]: DEBUG oslo_concurrency.lockutils [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] Acquiring lock "refresh_cache-40a459c7-657d-40db-aa78-d16af085a3ee" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.178642] env[69227]: DEBUG oslo_concurrency.lockutils [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] Acquired lock "refresh_cache-40a459c7-657d-40db-aa78-d16af085a3ee" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1635.178734] env[69227]: DEBUG nova.network.neutron [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Refreshing network info cache for port 70dfabcf-f64c-4c41-951d-b872f007c997 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1635.454157] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475139, 'name': CreateVM_Task, 'duration_secs': 0.29309} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.454498] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1635.454999] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.455194] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1635.456062] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1635.456062] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04d8bd46-15fa-461a-95e7-98f44b9af631 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.462485] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 1635.462485] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5291d17e-089e-6937-5318-8a36ea764c20" [ 1635.462485] env[69227]: _type = "Task" [ 1635.462485] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.469977] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5291d17e-089e-6937-5318-8a36ea764c20, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.856300] env[69227]: DEBUG nova.network.neutron [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Updated VIF entry in instance network info cache for port 70dfabcf-f64c-4c41-951d-b872f007c997. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1635.856718] env[69227]: DEBUG nova.network.neutron [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Updating instance_info_cache with network_info: [{"id": "70dfabcf-f64c-4c41-951d-b872f007c997", "address": "fa:16:3e:26:74:15", "network": {"id": "dcee9cd9-fefe-4c4c-8275-56d50b0ee143", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1722961603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "991c407526cf4eb7abcde2911220437e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70dfabcf-f6", "ovs_interfaceid": "70dfabcf-f64c-4c41-951d-b872f007c997", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1635.973580] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1635.973846] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1635.974081] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.974231] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1635.974411] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating directory with path [datastore1] 
devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1635.974657] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c8118b8-89c7-4ebe-ae33-58ff12b27134 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.991689] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1635.991938] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1635.992741] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60ce6b37-7033-49c1-b570-32f0f147764f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.998222] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 1635.998222] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52bbcfa5-bc39-6092-3c05-2a81a3e5b16c" [ 1635.998222] env[69227]: _type = "Task" [ 1635.998222] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.005827] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52bbcfa5-bc39-6092-3c05-2a81a3e5b16c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.359816] env[69227]: DEBUG oslo_concurrency.lockutils [req-76fa95af-7167-4354-9c90-7bbcfcdbceea req-6912dfd4-21ec-4c34-b232-89de72c0ef9d service nova] Releasing lock "refresh_cache-40a459c7-657d-40db-aa78-d16af085a3ee" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1636.507971] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1636.508322] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating directory with path [datastore1] vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1636.508454] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41590837-0ecc-40f7-ba59-db2941feede9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.519889] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Created directory with path [datastore1] vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1636.520085] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Fetch image to [datastore1] vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1636.520256] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore1] vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore1 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1636.520992] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fdd5f0-3ea5-4a7d-a0e6-fdb308e64e38 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.527282] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3af90f-5264-4dd4-951c-30a2b16a3b63 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.535875] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c1ba65ad-d1ef-4ec5-9603-2be1edd0fede {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.565325] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5642af4-af38-46d7-a9d0-ea4548bb9780 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.570855] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-df8f6b5f-2c05-45cf-9a11-24c4bdfaea19 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.591078] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore1 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1636.640900] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1636.700878] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1636.701141] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1638.151853] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquiring lock "598e8def-9627-4bd6-860b-50370c98b23b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1638.152183] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "598e8def-9627-4bd6-860b-50370c98b23b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1641.427120] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1643.427611] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1646.422546] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1646.426450] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.426965] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.427309] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1647.427376] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1648.427982] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1648.428363] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1648.931518] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1648.931777] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1648.931952] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1648.932123] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1648.933241] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35387f5-7c6c-4e17-a23d-2e9826b4fcda {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.941942] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d6c8f0-0d71-4ef5-8c61-8403470c5710 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.956262] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58939ce0-0d69-45ff-bc8d-4b1766c556c9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.962762] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76c9c94-ce3a-4079-afdd-1c7b004a4540 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.991577] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180933MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1648.991785] env[69227]: DEBUG 
oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1648.992091] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1650.024193] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 6085a4f8-f595-417c-9d33-22376a687be6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.024478] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.024478] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.024649] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.024808] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.024990] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.025171] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.025338] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.025485] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.025629] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 40a459c7-657d-40db-aa78-d16af085a3ee actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1650.528948] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.033106] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1651.536995] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1652.040054] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 598e8def-9627-4bd6-860b-50370c98b23b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1652.040380] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1652.040380] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1652.195346] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e6f4fbf-33fa-4922-bad8-f5f28dc11ec1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.203464] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73f3fa87-74d5-47a2-8651-90c7697cd13b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.234150] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca084a4-301a-416b-b2d8-6dc0607baee6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.241918] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65713969-762d-4e25-9349-1a8a90f12b7b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.255099] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.758051] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1653.263673] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1653.263673] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.271s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1654.263187] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1654.263412] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1654.263506] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1654.770340] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.770622] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.770698] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.770823] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.770943] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.771074] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.771195] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.771311] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.771425] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.771554] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1654.771679] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1656.434862] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1656.435117] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1656.833656] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "40a459c7-657d-40db-aa78-d16af085a3ee" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1656.930816] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1670.020388] env[69227]: WARNING oslo_vmware.rw_handles [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 
1670.020388] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1670.020388] env[69227]: ERROR oslo_vmware.rw_handles [ 1670.021132] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1670.023117] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1670.023356] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Copying Virtual Disk [datastore2] vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/97acb90f-4ca4-4430-8b6b-c353537538d2/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1670.023641] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c6d2ad8-8ca2-404b-9517-3680732dac30 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.033616] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Waiting for the task: (returnval){ [ 1670.033616] env[69227]: value = "task-3475141" [ 1670.033616] env[69227]: _type = "Task" [ 1670.033616] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.041823] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Task: {'id': task-3475141, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.544759] env[69227]: DEBUG oslo_vmware.exceptions [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1670.545036] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1670.545635] env[69227]: ERROR nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1670.545635] env[69227]: Faults: ['InvalidArgument'] [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Traceback (most recent call last): [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] yield resources [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self.driver.spawn(context, instance, image_meta, [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self._fetch_image_if_missing(context, vi) [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] image_cache(vi, tmp_image_ds_loc) [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] vm_util.copy_virtual_disk( [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] session._wait_for_task(vmdk_copy_task) [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] return self.wait_for_task(task_ref) [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] return evt.wait() [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] result = hub.switch() [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] return self.greenlet.switch() [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self.f(*self.args, **self.kw) [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] raise exceptions.translate_fault(task_info.error) [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Faults: ['InvalidArgument'] [ 1670.545635] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] [ 1670.546872] env[69227]: INFO nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Terminating instance [ 1670.547450] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1670.547656] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1670.547910] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9043b4b4-a187-40c2-a79e-a8ae5e6ad184 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.549989] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1670.550197] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1670.550915] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5ad259-e971-4647-93f6-ea265bd8f4bb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.558040] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1670.558246] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26ea7a71-1e68-44dc-b408-29b4ae14957a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.560275] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1670.560451] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1670.561419] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fb778ac-ae99-4be5-8c83-6c07b8444741 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.566115] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1670.566115] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f0a6c9-6c84-78c2-83a2-1dfb682a67de" [ 1670.566115] env[69227]: _type = "Task" [ 1670.566115] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.573385] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f0a6c9-6c84-78c2-83a2-1dfb682a67de, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.635990] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1670.636236] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1670.636423] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Deleting the datastore file [datastore2] 6085a4f8-f595-417c-9d33-22376a687be6 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1670.636735] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f233abb7-d4ef-4e7c-b29c-f771c3d6872b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.642959] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Waiting for the task: (returnval){ [ 1670.642959] env[69227]: value = "task-3475143" [ 1670.642959] env[69227]: _type = "Task" [ 1670.642959] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.650986] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Task: {'id': task-3475143, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.076925] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1671.077345] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating directory with path [datastore2] vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1671.077388] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a0a7d08-0691-4817-8c1a-2bb9b0b9858d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.090515] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Created directory with path [datastore2] vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1671.090792] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Fetch image to [datastore2] vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1671.090940] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1671.091684] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d8a07d-9068-49da-baa1-586ffc8af9ce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.098243] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889cd94b-f603-4e60-9788-7ec44d7fcde2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.107190] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7539d8-1401-46a9-870a-d0e1648733c3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.137728] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-730476c3-3151-483a-a411-9abecc54ca8e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.146331] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1729a885-67dc-4225-be2d-354c9d0d96b0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.152893] env[69227]: DEBUG oslo_vmware.api [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Task: {'id': task-3475143, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079492} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.153140] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1671.153328] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1671.153496] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1671.153666] env[69227]: INFO nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Took 0.60 seconds to destroy the instance on the hypervisor. 
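
Editor's note: the entries above show the recurring vCenter task pattern in this log: a task such as CopyVirtualDisk_Task or DeleteDatastoreFile_Task is invoked, then polled ("progress is 0%.") until it completes successfully or raises a fault such as InvalidArgument. The snippet below is only a minimal sketch of that polling loop under assumed names (TaskFailed, get_task_info, poll_interval); it is not the oslo.vmware wait_for_task/_poll_task implementation referenced in the log.

    # Illustrative sketch of the task-polling pattern seen above (assumed names,
    # not oslo.vmware code).
    import time

    class TaskFailed(Exception):
        """Raised when the polled task reports an error (e.g. InvalidArgument)."""

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        get_task_info is a callable returning an object with .state
        ('running', 'success' or 'error') and .error, mirroring what the
        CopyVirtualDisk_Task / DeleteDatastoreFile_Task polling above reports.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # The CopyVirtualDisk_Task above ends on this branch with
                # "A specified parameter was not correct: fileType".
                raise TaskFailed(info.error)
            # Still running: the log prints "progress is N%." on each poll.
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")
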
[ 1671.155751] env[69227]: DEBUG nova.compute.claims [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1671.156070] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1671.156294] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1671.168097] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1671.217664] env[69227]: DEBUG oslo_vmware.rw_handles [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1671.293272] env[69227]: DEBUG oslo_vmware.rw_handles [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1671.293482] env[69227]: DEBUG oslo_vmware.rw_handles [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1671.825712] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c43c970-fc40-4591-8d6b-e6aeab0627ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.833498] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49ae210-ab5f-4127-acaf-87ca4d9eec41 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.863844] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28193d2-29fc-437e-a832-3c0f90ab8a80 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.870844] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b659d9e6-ec08-4089-965d-a0f295232b98 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.883508] env[69227]: DEBUG nova.compute.provider_tree [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1672.387063] env[69227]: DEBUG nova.scheduler.client.report [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1672.891607] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.735s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1672.892230] env[69227]: ERROR nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1672.892230] env[69227]: Faults: ['InvalidArgument'] [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Traceback (most recent call last): [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1672.892230] env[69227]: ERROR 
nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self.driver.spawn(context, instance, image_meta, [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self._fetch_image_if_missing(context, vi) [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] image_cache(vi, tmp_image_ds_loc) [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] vm_util.copy_virtual_disk( [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] session._wait_for_task(vmdk_copy_task) [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] return self.wait_for_task(task_ref) [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] return evt.wait() [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] result = hub.switch() [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] return self.greenlet.switch() [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] self.f(*self.args, **self.kw) [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] raise exceptions.translate_fault(task_info.error) [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Faults: ['InvalidArgument'] [ 1672.892230] env[69227]: ERROR nova.compute.manager [instance: 6085a4f8-f595-417c-9d33-22376a687be6] [ 1672.893156] env[69227]: DEBUG nova.compute.utils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1672.894663] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Build of instance 6085a4f8-f595-417c-9d33-22376a687be6 was re-scheduled: A specified parameter was not correct: fileType [ 1672.894663] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1672.895037] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1672.895252] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1672.895439] env[69227]: DEBUG nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1672.895601] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1673.672928] env[69227]: DEBUG nova.network.neutron [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.176113] env[69227]: INFO nova.compute.manager [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Took 1.28 seconds to deallocate network for instance. [ 1675.210581] env[69227]: INFO nova.scheduler.client.report [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Deleted allocations for instance 6085a4f8-f595-417c-9d33-22376a687be6 [ 1675.721686] env[69227]: DEBUG oslo_concurrency.lockutils [None req-16302680-1886-4c18-bfdf-187dd4f6162b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "6085a4f8-f595-417c-9d33-22376a687be6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 459.584s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1675.722994] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "6085a4f8-f595-417c-9d33-22376a687be6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 263.901s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1675.723711] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Acquiring lock "6085a4f8-f595-417c-9d33-22376a687be6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1675.723943] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "6085a4f8-f595-417c-9d33-22376a687be6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1675.724271] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "6085a4f8-f595-417c-9d33-22376a687be6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1675.726774] env[69227]: INFO nova.compute.manager [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Terminating instance [ 1675.728442] env[69227]: DEBUG nova.compute.manager [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1675.728631] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1675.728892] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6a183304-25f8-4513-9196-60820e15f765 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.739454] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65e7c2e6-0143-4e88-9eeb-91187145fde2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.768821] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6085a4f8-f595-417c-9d33-22376a687be6 could not be found. [ 1675.769010] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1675.769179] env[69227]: INFO nova.compute.manager [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1675.769422] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1675.769644] env[69227]: DEBUG nova.compute.manager [-] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1675.769740] env[69227]: DEBUG nova.network.neutron [-] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1676.228197] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1676.286754] env[69227]: DEBUG nova.network.neutron [-] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.748368] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1676.748727] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1676.750322] env[69227]: INFO nova.compute.claims [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1676.789399] env[69227]: INFO nova.compute.manager [-] [instance: 6085a4f8-f595-417c-9d33-22376a687be6] Took 1.02 seconds to deallocate network for instance. 
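
Editor's note: throughout this section oslo_concurrency.lockutils reports each lock in three stages: "Acquiring lock", "acquired ... :: waited Ns", and ""released" ... :: held Ns" (for example the "compute_resources" lock held 1.735s during the claim abort above). The snippet below is an illustrative stand-in for that timing pattern only; the lock registry, caller strings, and print-based output are assumptions, not the oslo.concurrency code.

    # Illustrative sketch of the acquire/waited/held timing pattern in the log
    # (assumed helper, not oslo_concurrency.lockutils).
    import threading
    import time
    from contextlib import contextmanager

    _LOCKS = {}

    @contextmanager
    def timed_lock(name, caller):
        lock = _LOCKS.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        with lock:
            waited = time.monotonic() - t0
            print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
            t1 = time.monotonic()
            try:
                yield
            finally:
                held = time.monotonic() - t1
                print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')

    # Example: the resource tracker serialises claim and abort work this way.
    with timed_lock("compute_resources", "ResourceTracker.abort_instance_claim"):
        pass  # update usage, drop the failed instance's claim, etc.
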
[ 1677.812076] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0b9a233c-3fa3-429a-b81d-60e9c084429b tempest-ServerRescueTestJSON-1123565701 tempest-ServerRescueTestJSON-1123565701-project-member] Lock "6085a4f8-f595-417c-9d33-22376a687be6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.089s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1677.906341] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc31381-b8d8-4e24-a5c8-8709a0b7ec48 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.913928] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41b6f29-f7a0-45e9-89e8-dc3ecf3697fe {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.952194] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6a887e-7029-4fa1-a378-77d85dc35bc2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.962116] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c839c2-beb0-459d-8935-d6a88d89fed2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.975855] env[69227]: DEBUG nova.compute.provider_tree [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1678.479221] env[69227]: DEBUG nova.scheduler.client.report [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1678.983829] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.235s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1678.984405] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1679.489631] env[69227]: DEBUG nova.compute.utils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1679.491010] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1679.491182] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1679.537119] env[69227]: DEBUG nova.policy [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bf5b1a2df6a41bbba456c54f29c2ba5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '732948237883495b892ab3b007d7905d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1679.827654] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Successfully created port: cddaf260-513f-4a57-b5fb-bc5c07060fbb {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1679.994627] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1681.003912] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1681.028716] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1681.029044] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1681.029201] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1681.029401] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1681.029548] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1681.029693] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1681.029899] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1681.030073] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
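
Editor's note: the nova.virt.hardware entries above reduce the flavor and image limits (all 0:0:0, i.e. unconstrained, capped at 65536) to the candidate CPU topologies for the instance's vCPU count. The sketch below is a simplified illustration of that enumeration, not nova.virt.hardware itself; VirtCPUTopology here is a stand-in namedtuple.

    # Illustrative sketch: enumerate (sockets, cores, threads) combinations whose
    # product equals the vCPU count, within the given limits. Simplified, not the
    # Nova implementation.
    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Return every topology whose sockets * cores * threads == vcpus."""
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For the 1-vCPU m1.nano flavor above, the only candidate is 1:1:1,
    # matching the "Got 1 possible topologies" entry that follows.
    print(possible_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]
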
[ 1681.030246] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1681.030405] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1681.030574] env[69227]: DEBUG nova.virt.hardware [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1681.031446] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b4ccaa-7091-48ae-9ac2-678b583b2de4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.039841] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa6fe57-ac19-44b8-a335-eca358df2f40 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.173120] env[69227]: DEBUG nova.compute.manager [req-db62d531-48bc-4749-953c-f68ca2330d45 req-25162c56-7eb6-416c-acf3-4f13885b6a84 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Received event network-vif-plugged-cddaf260-513f-4a57-b5fb-bc5c07060fbb {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1681.173369] env[69227]: DEBUG oslo_concurrency.lockutils [req-db62d531-48bc-4749-953c-f68ca2330d45 req-25162c56-7eb6-416c-acf3-4f13885b6a84 service nova] Acquiring lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1681.173543] env[69227]: DEBUG oslo_concurrency.lockutils [req-db62d531-48bc-4749-953c-f68ca2330d45 req-25162c56-7eb6-416c-acf3-4f13885b6a84 service nova] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1681.173712] env[69227]: DEBUG oslo_concurrency.lockutils [req-db62d531-48bc-4749-953c-f68ca2330d45 req-25162c56-7eb6-416c-acf3-4f13885b6a84 service nova] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1681.173880] env[69227]: DEBUG nova.compute.manager [req-db62d531-48bc-4749-953c-f68ca2330d45 req-25162c56-7eb6-416c-acf3-4f13885b6a84 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] No waiting events found dispatching network-vif-plugged-cddaf260-513f-4a57-b5fb-bc5c07060fbb {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1681.174056] 
env[69227]: WARNING nova.compute.manager [req-db62d531-48bc-4749-953c-f68ca2330d45 req-25162c56-7eb6-416c-acf3-4f13885b6a84 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Received unexpected event network-vif-plugged-cddaf260-513f-4a57-b5fb-bc5c07060fbb for instance with vm_state building and task_state spawning. [ 1681.242882] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Successfully updated port: cddaf260-513f-4a57-b5fb-bc5c07060fbb {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1681.745664] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "refresh_cache-1c3529ac-4abf-46fe-8b40-1e4222e2150a" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1681.746855] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired lock "refresh_cache-1c3529ac-4abf-46fe-8b40-1e4222e2150a" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1681.746855] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1682.276497] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1682.387886] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Updating instance_info_cache with network_info: [{"id": "cddaf260-513f-4a57-b5fb-bc5c07060fbb", "address": "fa:16:3e:26:9a:5c", "network": {"id": "6474d409-6b66-4fde-b08d-00d5f7922675", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-736730889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732948237883495b892ab3b007d7905d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcddaf260-51", "ovs_interfaceid": "cddaf260-513f-4a57-b5fb-bc5c07060fbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.890581] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Releasing lock "refresh_cache-1c3529ac-4abf-46fe-8b40-1e4222e2150a" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1682.890915] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance network_info: |[{"id": "cddaf260-513f-4a57-b5fb-bc5c07060fbb", "address": "fa:16:3e:26:9a:5c", "network": {"id": "6474d409-6b66-4fde-b08d-00d5f7922675", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-736730889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732948237883495b892ab3b007d7905d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcddaf260-51", "ovs_interfaceid": "cddaf260-513f-4a57-b5fb-bc5c07060fbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1682.891361] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:9a:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2928baf1-3efb-4205-a786-d9783e51f699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cddaf260-513f-4a57-b5fb-bc5c07060fbb', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1682.898855] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating folder: Project (732948237883495b892ab3b007d7905d). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1682.899167] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-865c847c-8f31-4134-aa19-a30dfe79680e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.911221] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Created folder: Project (732948237883495b892ab3b007d7905d) in parent group-v694623. [ 1682.911430] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating folder: Instances. Parent ref: group-v694722. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1682.911680] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b967dda1-889b-4369-bfd8-24b50010ff8b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.920158] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Created folder: Instances in parent group-v694722. [ 1682.920384] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1682.920567] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1682.920761] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c113d9b5-4e33-4957-9aa4-623c7880c403 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.938449] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1682.938449] env[69227]: value = "task-3475146" [ 1682.938449] env[69227]: _type = "Task" [ 1682.938449] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.946413] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475146, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.199290] env[69227]: DEBUG nova.compute.manager [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Received event network-changed-cddaf260-513f-4a57-b5fb-bc5c07060fbb {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1683.199545] env[69227]: DEBUG nova.compute.manager [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Refreshing instance network info cache due to event network-changed-cddaf260-513f-4a57-b5fb-bc5c07060fbb. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1683.199766] env[69227]: DEBUG oslo_concurrency.lockutils [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] Acquiring lock "refresh_cache-1c3529ac-4abf-46fe-8b40-1e4222e2150a" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.199911] env[69227]: DEBUG oslo_concurrency.lockutils [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] Acquired lock "refresh_cache-1c3529ac-4abf-46fe-8b40-1e4222e2150a" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1683.200088] env[69227]: DEBUG nova.network.neutron [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Refreshing network info cache for port cddaf260-513f-4a57-b5fb-bc5c07060fbb {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1683.448634] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475146, 'name': CreateVM_Task, 'duration_secs': 0.281259} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.449035] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1683.449435] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1683.449598] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1683.449910] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1683.450164] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbc4b69c-db94-4ff3-b53c-2281055bf2dc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.454353] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for the task: (returnval){ [ 1683.454353] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52712766-dad7-12d8-973d-34d00f4603bd" [ 1683.454353] env[69227]: _type = "Task" [ 1683.454353] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.465374] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52712766-dad7-12d8-973d-34d00f4603bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.960690] env[69227]: DEBUG nova.network.neutron [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Updated VIF entry in instance network info cache for port cddaf260-513f-4a57-b5fb-bc5c07060fbb. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1683.961172] env[69227]: DEBUG nova.network.neutron [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Updating instance_info_cache with network_info: [{"id": "cddaf260-513f-4a57-b5fb-bc5c07060fbb", "address": "fa:16:3e:26:9a:5c", "network": {"id": "6474d409-6b66-4fde-b08d-00d5f7922675", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-736730889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732948237883495b892ab3b007d7905d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcddaf260-51", "ovs_interfaceid": "cddaf260-513f-4a57-b5fb-bc5c07060fbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.966214] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1683.966567] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1683.966931] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.467474] env[69227]: DEBUG oslo_concurrency.lockutils [req-07ac10cc-12d8-4b6b-946a-cee3d33d5943 req-d420ff1c-bc1e-49f0-b77c-af407c1db891 service nova] Releasing lock "refresh_cache-1c3529ac-4abf-46fe-8b40-1e4222e2150a" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1684.499111] env[69227]: WARNING oslo_vmware.rw_handles [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 
1684.499111] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1684.499111] env[69227]: ERROR oslo_vmware.rw_handles [ 1684.499623] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore1 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1684.500922] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1684.501207] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Copying Virtual Disk [datastore1] vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore1] vmware_temp/d7325a82-41d3-46b0-96be-32a5c34d4240/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1684.501497] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac2ff88b-e68d-4497-942e-fa84f96b6162 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.509458] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 1684.509458] env[69227]: value = "task-3475147" [ 1684.509458] env[69227]: _type = "Task" [ 1684.509458] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.517360] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': task-3475147, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.019740] env[69227]: DEBUG oslo_vmware.exceptions [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1685.019874] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "[datastore1] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1685.020331] env[69227]: ERROR nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1685.020331] env[69227]: Faults: ['InvalidArgument'] [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Traceback (most recent call last): [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] yield resources [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self.driver.spawn(context, instance, image_meta, [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self._fetch_image_if_missing(context, vi) [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] image_cache(vi, tmp_image_ds_loc) [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] 
File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] vm_util.copy_virtual_disk( [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] session._wait_for_task(vmdk_copy_task) [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] return self.wait_for_task(task_ref) [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] return evt.wait() [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] result = hub.switch() [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] return self.greenlet.switch() [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self.f(*self.args, **self.kw) [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] raise exceptions.translate_fault(task_info.error) [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Faults: ['InvalidArgument'] [ 1685.020331] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] [ 1685.021443] env[69227]: INFO nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Terminating instance [ 1685.023479] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1685.023695] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1685.024482] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80f9bc0-8c8d-4f7a-88df-23c0980c61af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.031504] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1685.031504] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe9a8215-96cb-49e4-bd87-0db91f5c4cce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.091915] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1685.092144] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Deleting contents of the VM from datastore datastore1 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1685.092331] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleting the datastore file [datastore1] 40a459c7-657d-40db-aa78-d16af085a3ee {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1685.092591] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acb948ed-b0f8-4cdc-aa33-3ae5836df003 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.100769] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 1685.100769] env[69227]: value = "task-3475149" [ 1685.100769] env[69227]: _type = "Task" [ 1685.100769] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.108526] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': task-3475149, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.610093] env[69227]: DEBUG oslo_vmware.api [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': task-3475149, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07303} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.610461] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1685.610502] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Deleted contents of the VM from datastore datastore1 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1685.610646] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1685.610829] env[69227]: INFO nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Took 0.59 seconds to destroy the instance on the hypervisor. 
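The failed CopyVirtualDisk_Task above surfaces as a VimFaultException ("A specified parameter was not correct: fileType") once wait_for_task polls the task into an error state, and the subsequent cleanup runs its own DeleteDatastoreFile_Task through the same wait_for_task/_poll_task loop until it completes. A rough sketch of that poll-until-done pattern follows; the task-info callable and its dictionary fields are invented purely for illustration and are not the oslo.vmware API:

    # Hypothetical sketch of the poll-until-done loop visible in the
    # wait_for_task / _poll_task log lines; field names are assumptions.
    import time

    class TaskFailed(Exception):
        """Raised when the backend reports the task in an error state."""

    def wait_for_task(get_task_info, interval=0.5, timeout=300):
        """Poll a task-info callable until it reports success or error."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 0}
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # Mirrors the "raise exceptions.translate_fault(...)" step in
                # the traceback above: an error state becomes an exception.
                raise TaskFailed(info.get("error", "unknown fault"))
            time.sleep(interval)
        raise TimeoutError("task did not complete in time")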
[ 1685.612927] env[69227]: DEBUG nova.compute.claims [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1685.613115] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1685.613353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1686.280381] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-332acc99-225d-4f74-8be7-dd00adc39bdd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.288152] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3195f6-da90-4828-82db-e14ab40d7daa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.317663] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30d09df-9cb3-485a-98cd-17769197e413 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.324383] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fe053a-635a-4c05-8563-d03312caed00 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.337330] env[69227]: DEBUG nova.compute.provider_tree [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1686.841032] env[69227]: DEBUG nova.scheduler.client.report [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1687.345961] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 
tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.732s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1687.346573] env[69227]: ERROR nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1687.346573] env[69227]: Faults: ['InvalidArgument'] [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Traceback (most recent call last): [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self.driver.spawn(context, instance, image_meta, [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self._fetch_image_if_missing(context, vi) [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] image_cache(vi, tmp_image_ds_loc) [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] vm_util.copy_virtual_disk( [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] session._wait_for_task(vmdk_copy_task) [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] return self.wait_for_task(task_ref) [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] return evt.wait() [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 
40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] result = hub.switch() [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] return self.greenlet.switch() [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] self.f(*self.args, **self.kw) [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] raise exceptions.translate_fault(task_info.error) [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Faults: ['InvalidArgument'] [ 1687.346573] env[69227]: ERROR nova.compute.manager [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] [ 1687.347480] env[69227]: DEBUG nova.compute.utils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1687.349590] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Build of instance 40a459c7-657d-40db-aa78-d16af085a3ee was re-scheduled: A specified parameter was not correct: fileType [ 1687.349590] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1687.349911] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1687.350105] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1687.350324] env[69227]: DEBUG nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1687.350436] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1688.080792] env[69227]: DEBUG nova.network.neutron [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.587967] env[69227]: INFO nova.compute.manager [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Took 1.24 seconds to deallocate network for instance. [ 1689.620894] env[69227]: INFO nova.scheduler.client.report [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleted allocations for instance 40a459c7-657d-40db-aa78-d16af085a3ee [ 1690.131691] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ffa2fd0b-7c37-4967-815f-c975835917b1 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "40a459c7-657d-40db-aa78-d16af085a3ee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.104s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1690.132964] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "40a459c7-657d-40db-aa78-d16af085a3ee" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 33.300s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1690.133203] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "40a459c7-657d-40db-aa78-d16af085a3ee-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1690.133416] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "40a459c7-657d-40db-aa78-d16af085a3ee-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1690.133580] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "40a459c7-657d-40db-aa78-d16af085a3ee-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1690.135829] env[69227]: INFO nova.compute.manager [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Terminating instance [ 1690.137423] env[69227]: DEBUG nova.compute.manager [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1690.137625] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1690.137914] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98c0b640-bc03-4ad4-b642-c5efedef6972 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.147970] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca71d54-4d82-4ee5-b800-bb7df41993b8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.184587] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40a459c7-657d-40db-aa78-d16af085a3ee could not be found. [ 1690.185250] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1690.185586] env[69227]: INFO nova.compute.manager [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1690.185953] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1690.186438] env[69227]: DEBUG nova.compute.manager [-] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1690.186540] env[69227]: DEBUG nova.network.neutron [-] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1690.636074] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1690.713069] env[69227]: DEBUG nova.network.neutron [-] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.155810] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1691.156124] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1691.157739] env[69227]: INFO nova.compute.claims [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1691.215965] env[69227]: INFO nova.compute.manager [-] [instance: 40a459c7-657d-40db-aa78-d16af085a3ee] Took 1.03 seconds to deallocate network for instance. 
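The "compute_resources" lock lines above (abort_instance_claim holding it for 1.732s, then instance_claim for 53fae914-75b0-414e-b3ce-9d8be3462039 acquiring it again) come from oslo.concurrency serializing resource-tracker updates. A minimal sketch of that pattern using the same lockutils decorator the log references; the functions and the resource dict are illustrative stand-ins, not Nova's ResourceTracker:

    # Minimal sketch: claims and claim aborts serialize on one named lock,
    # which is what produces the "waited .../held ..." lockutils log lines.
    from oslo_concurrency import lockutils

    _resources = {"vcpus_used": 0}

    @lockutils.synchronized("compute_resources")
    def instance_claim(vcpus):
        # Only one claim mutates the tracked resources at a time.
        _resources["vcpus_used"] += vcpus

    @lockutils.synchronized("compute_resources")
    def abort_instance_claim(vcpus):
        # Runs under the same lock, so a concurrent claim waits, as logged.
        _resources["vcpus_used"] -= vcpus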
[ 1691.301744] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "cc0035fc-3edc-457b-a798-afa4f9ea7071" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1691.301972] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1692.252718] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5b847ac5-0100-4b3d-b452-7120c91c92f0 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "40a459c7-657d-40db-aa78-d16af085a3ee" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.120s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1692.380877] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a17a1f99-47a1-4f87-b5fc-ea0c4af24120 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.392076] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afd20f0-69b0-47bc-b702-3bc7d4b82c1b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.424451] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c680c9-9b74-4173-b4d3-2781ccd0c424 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.435374] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8707e6d5-1416-4f00-ae93-a75e116b3d84 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.451754] env[69227]: DEBUG nova.compute.provider_tree [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1692.954112] env[69227]: DEBUG nova.scheduler.client.report [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1693.458964] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1693.459538] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1693.926623] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1693.926869] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1693.964834] env[69227]: DEBUG nova.compute.utils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1693.966270] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Not allocating networking since 'none' was specified. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1694.468506] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1695.478661] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1695.502968] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1695.503233] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1695.503389] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1695.503573] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1695.503719] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1695.503863] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1695.504104] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1695.504285] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1695.504458] env[69227]: DEBUG 
nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1695.504619] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1695.504791] env[69227]: DEBUG nova.virt.hardware [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1695.505683] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa490961-6930-422e-9d15-b51446af8320 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.514135] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c7ce742-da03-46a1-ab1a-2541cd79bccc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.526850] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance VIF info [] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1695.532254] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Creating folder: Project (854fbfd3a3b9470d80cc144f85c341dd). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1695.532537] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-593d7a43-3b56-4971-8d26-ce89f60c478e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.542216] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Created folder: Project (854fbfd3a3b9470d80cc144f85c341dd) in parent group-v694623. [ 1695.542388] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Creating folder: Instances. Parent ref: group-v694725. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1695.542589] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc49b2cf-c6c0-4f07-b0b4-fa718c25fd35 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.552014] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Created folder: Instances in parent group-v694725. [ 1695.552240] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1695.552416] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1695.552594] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f426f3b-f58f-4789-a8c0-2a6331a2c928 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.567243] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1695.567243] env[69227]: value = "task-3475152" [ 1695.567243] env[69227]: _type = "Task" [ 1695.567243] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.573717] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475152, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.077417] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475152, 'name': CreateVM_Task, 'duration_secs': 0.340364} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.078241] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1696.078241] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1696.078389] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1696.079029] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1696.079029] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2395592f-1708-4295-9de0-1178c7c222b8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.083167] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for the task: (returnval){ [ 1696.083167] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52ea1b90-be72-2fcf-7f53-45b3161027a4" [ 1696.083167] env[69227]: _type = "Task" [ 1696.083167] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.094076] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52ea1b90-be72-2fcf-7f53-45b3161027a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.592858] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1696.593132] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1696.593331] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.427880] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.427276] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1706.426864] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1707.423304] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1708.427376] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1708.427723] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1708.427772] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1708.427922] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1709.427778] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1709.428084] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1709.428182] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1709.935375] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.935566] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.935620] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.935747] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.935865] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.935979] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.936128] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.936248] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.936363] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.936476] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1709.936592] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1709.936883] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1710.440380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1710.440380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1710.440728] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1710.440728] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1710.441589] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81a17a5-a202-4635-a438-de4630245409 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.449475] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870e7973-a876-4256-9e14-c6a1e30f722b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.463076] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3508088f-9ae8-4792-b4d0-1849abdc4377 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.469668] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e11c09a7-09c7-479f-a6a7-0be49d4498aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.498144] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180963MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1710.498307] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1710.498522] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1711.530246] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 891a992b-5cbb-404e-8225-3ada55327def actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.530529] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.530529] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.530621] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.530735] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.530848] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.530958] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.531081] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.531228] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1711.531347] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1712.034202] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1712.537932] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 598e8def-9627-4bd6-860b-50370c98b23b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1713.040879] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1713.544330] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.049051] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1714.049051] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1714.049051] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1714.203426] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09aa3ef-7a47-4451-9bca-8327f1564d0f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.210480] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f27839b6-e225-4cb5-bee1-cd3d0a9e3c9a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.239015] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c129bd4-a99d-42ed-b544-260ee7b09827 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.245775] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e76049-e603-4f7f-89b0-2654c93afc1e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.259043] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1714.762440] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1715.267888] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1715.268122] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.770s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1720.039471] env[69227]: WARNING oslo_vmware.rw_handles [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1720.039471] env[69227]: ERROR oslo_vmware.rw_handles [ 1720.040166] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1720.042013] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1720.042267] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 
tempest-VolumesAdminNegativeTest-661681212-project-member] Copying Virtual Disk [datastore2] vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/10632556-4e8a-430f-966c-411b07f90c68/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1720.042546] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-48882bba-7c8e-42f6-8b40-26081ce7c07e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.050423] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1720.050423] env[69227]: value = "task-3475153" [ 1720.050423] env[69227]: _type = "Task" [ 1720.050423] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.058130] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': task-3475153, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.560749] env[69227]: DEBUG oslo_vmware.exceptions [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1720.561046] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1720.561674] env[69227]: ERROR nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1720.561674] env[69227]: Faults: ['InvalidArgument'] [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] Traceback (most recent call last): [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] yield resources [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self.driver.spawn(context, instance, image_meta, [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self._fetch_image_if_missing(context, vi) [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] image_cache(vi, tmp_image_ds_loc) [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] vm_util.copy_virtual_disk( [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] session._wait_for_task(vmdk_copy_task) [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] return self.wait_for_task(task_ref) [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] return evt.wait() [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] result = hub.switch() [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] return self.greenlet.switch() [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self.f(*self.args, **self.kw) [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] raise exceptions.translate_fault(task_info.error) [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] Faults: ['InvalidArgument'] [ 1720.561674] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] [ 1720.562644] env[69227]: INFO nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Terminating instance [ 1720.563428] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1720.563634] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.563873] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c33f3989-c288-4fa3-a91d-e0aa862bfcc5 {{(pid=69227) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.566156] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1720.566350] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1720.567077] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599ce34a-47c0-46b2-8431-e1280317f17c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.573626] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1720.574534] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bcdfde8-a6bb-4158-9123-2b71d6d634fd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.575854] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.576037] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1720.576823] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f5f28087-3ef5-426d-a29b-52a2415a1d8a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.581544] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Waiting for the task: (returnval){ [ 1720.581544] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5217f1a8-0041-dbbb-9223-7e293434a4ef" [ 1720.581544] env[69227]: _type = "Task" [ 1720.581544] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.589493] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5217f1a8-0041-dbbb-9223-7e293434a4ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.641141] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1720.641444] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1720.641695] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Deleting the datastore file [datastore2] 891a992b-5cbb-404e-8225-3ada55327def {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1720.642032] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b274e40-adb2-4b4f-a5ad-c75ce401bb03 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.648504] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for the task: (returnval){ [ 1720.648504] env[69227]: value = "task-3475155" [ 1720.648504] env[69227]: _type = "Task" [ 1720.648504] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.655683] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': task-3475155, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.092052] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1721.092400] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Creating directory with path [datastore2] vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1721.092533] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5929ae00-0856-4325-96bf-a7fadcad4138 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.103072] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Created directory with path [datastore2] vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1721.103261] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Fetch image to [datastore2] vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1721.103433] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1721.104125] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e9bd2b-8454-46b8-963c-4e092f315bf2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.110483] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448c2048-0bde-413a-9ca1-064a6d3c1f59 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.119135] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159740b3-7ba9-4b6e-ad15-dc4fe031a9e0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.152564] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07422b84-3d7e-428d-b22f-d8556a4897ed {{(pid=69227) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.158967] env[69227]: DEBUG oslo_vmware.api [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Task: {'id': task-3475155, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064927} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.160487] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1721.160553] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1721.160675] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1721.160844] env[69227]: INFO nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Took 0.59 seconds to destroy the instance on the hypervisor. 
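The FileManager.DeleteDatastoreFile_Task records above follow oslo.vmware's usual invoke-then-poll pattern: the task is started through the session, then wait_for_task polls it (the api.py:397/434/444 frames in the records) until it completes or a fault is translated into an exception. A minimal, hedged sketch of that pattern is below; the host, credentials, datastore path and the datacenter=None argument are illustrative placeholders, and this is not the exact Nova code path.

    from oslo_vmware import api

    # Placeholder connection details, not the vCenter from this log.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
    vim = session.vim

    # Start the datastore file deletion task (FileManager.DeleteDatastoreFile_Task).
    task = session.invoke_api(vim, 'DeleteDatastoreFile_Task',
                              vim.service_content.fileManager,
                              name='[datastore2] example-instance-dir',
                              datacenter=None)  # Nova passes a real datacenter ref here

    # Poll the task at task_poll_interval until it succeeds; a task error is raised
    # as an oslo_vmware exception (e.g. VimFaultException) by the poller.
    task_info = session.wait_for_task(task)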
[ 1721.162603] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-46ca385e-cb8e-4aa0-bd97-9c19f93f04e9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.164427] env[69227]: DEBUG nova.compute.claims [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1721.164604] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1721.164863] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1721.186150] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1721.235453] env[69227]: DEBUG oslo_vmware.rw_handles [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1721.298420] env[69227]: DEBUG oslo_vmware.rw_handles [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1721.298621] env[69227]: DEBUG oslo_vmware.rw_handles [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1721.823589] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0e5aa4-4eb7-4e7c-959d-8f64a791837b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.831478] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5024c130-47b9-4995-8931-9c766e8d344e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.861494] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bc00baa-d427-485a-b952-b5951edea5ce {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.868343] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ec00bc-77ec-4267-85e5-32cc0c7d25e1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.881998] env[69227]: DEBUG nova.compute.provider_tree [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.384823] env[69227]: DEBUG nova.scheduler.client.report [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1722.890800] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.726s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1722.891443] env[69227]: ERROR nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.891443] env[69227]: Faults: ['InvalidArgument'] [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] Traceback (most recent call last): [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1722.891443] 
env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self.driver.spawn(context, instance, image_meta, [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self._fetch_image_if_missing(context, vi) [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] image_cache(vi, tmp_image_ds_loc) [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] vm_util.copy_virtual_disk( [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] session._wait_for_task(vmdk_copy_task) [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] return self.wait_for_task(task_ref) [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] return evt.wait() [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] result = hub.switch() [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] return self.greenlet.switch() [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] self.f(*self.args, **self.kw) [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] raise exceptions.translate_fault(task_info.error) [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] Faults: ['InvalidArgument'] [ 1722.891443] env[69227]: ERROR nova.compute.manager [instance: 891a992b-5cbb-404e-8225-3ada55327def] [ 1722.892528] env[69227]: DEBUG nova.compute.utils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1722.893809] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Build of instance 891a992b-5cbb-404e-8225-3ada55327def was re-scheduled: A specified parameter was not correct: fileType [ 1722.893809] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1722.894423] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1722.894423] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1722.894606] env[69227]: DEBUG nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1722.894762] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1723.702762] env[69227]: DEBUG nova.network.neutron [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.206574] env[69227]: INFO nova.compute.manager [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Took 1.31 seconds to deallocate network for instance. [ 1725.236808] env[69227]: INFO nova.scheduler.client.report [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Deleted allocations for instance 891a992b-5cbb-404e-8225-3ada55327def [ 1725.746380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7677c8fe-18fa-42cc-9e92-ba2201e037ba tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "891a992b-5cbb-404e-8225-3ada55327def" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 509.307s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1725.746706] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "891a992b-5cbb-404e-8225-3ada55327def" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 313.077s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1725.746972] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Acquiring lock "891a992b-5cbb-404e-8225-3ada55327def-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1725.747145] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "891a992b-5cbb-404e-8225-3ada55327def-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1725.747349] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "891a992b-5cbb-404e-8225-3ada55327def-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1725.749135] env[69227]: INFO nova.compute.manager [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Terminating instance [ 1725.750753] env[69227]: DEBUG nova.compute.manager [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1725.750944] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1725.751241] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a2df8cc6-78f2-4b87-a982-faedc2c45a7c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.760404] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133ccaab-09f0-4325-8fe3-53e85c86bd9f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.791065] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 891a992b-5cbb-404e-8225-3ada55327def could not be found. [ 1725.791065] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1725.791065] env[69227]: INFO nova.compute.manager [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1725.791065] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1725.791701] env[69227]: DEBUG nova.compute.manager [-] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1725.791701] env[69227]: DEBUG nova.network.neutron [-] [instance: 891a992b-5cbb-404e-8225-3ada55327def] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1726.249912] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1726.310919] env[69227]: DEBUG nova.network.neutron [-] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.770413] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1726.770692] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1726.772152] env[69227]: INFO nova.compute.claims [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1726.812892] env[69227]: INFO nova.compute.manager [-] [instance: 891a992b-5cbb-404e-8225-3ada55327def] Took 1.02 seconds to deallocate network for instance. 
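The "Waiting for function ... _deallocate_network_with_retries to return" record above comes from oslo.service's looping-call machinery. A hedged sketch of that pattern follows; the interval and the function body are illustrative, not Nova's actual configuration or implementation.

    from oslo_service import loopingcall

    def _deallocate_network_with_retries():
        # Placeholder body: call the network deallocation here; returning lets the
        # loop run again on the next interval, raising LoopingCallDone stops it.
        raise loopingcall.LoopingCallDone()

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_network_with_retries)
    timer.start(interval=1).wait()  # blocks the caller until LoopingCallDone is raised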
[ 1727.837658] env[69227]: DEBUG oslo_concurrency.lockutils [None req-3568aee3-fbcc-4cc2-852f-c01b3689c27f tempest-VolumesAdminNegativeTest-661681212 tempest-VolumesAdminNegativeTest-661681212-project-member] Lock "891a992b-5cbb-404e-8225-3ada55327def" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.091s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1727.931206] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceaeb164-c724-4034-9fe1-90af8ac13fcf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.938732] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cbdb0d-8360-42e2-afc0-8a155a58ba31 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.967824] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1bcefb-c4d2-41b3-8ff4-5f78a6d7382a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.974520] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d0eb50-1164-4322-8495-cd4b22648bdf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.988786] env[69227]: DEBUG nova.compute.provider_tree [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.492011] env[69227]: DEBUG nova.scheduler.client.report [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1728.997065] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.226s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1728.997544] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1729.502437] env[69227]: DEBUG nova.compute.utils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1729.503817] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Not allocating networking since 'none' was specified. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1730.005620] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1731.017052] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1731.041969] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1731.042250] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1731.042411] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1731.042592] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1731.042736] env[69227]: DEBUG nova.virt.hardware [None 
req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1731.042877] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1731.043093] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1731.043256] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1731.043423] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1731.043580] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1731.043751] env[69227]: DEBUG nova.virt.hardware [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1731.044618] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6daca34f-0c1e-4dd1-99b2-952bac4e070a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.052288] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503dbf1b-7c28-49e5-b4f3-14d4853fa1c2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.071011] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance VIF info [] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1731.080497] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Creating folder: Project (03048ef06eec4c5f8ffc29f31d0d7de5). Parent ref: group-v694623. 
{{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1731.080794] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a3802787-1a12-47b2-bc67-88065936596c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.091047] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Created folder: Project (03048ef06eec4c5f8ffc29f31d0d7de5) in parent group-v694623. [ 1731.091047] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Creating folder: Instances. Parent ref: group-v694728. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1731.091185] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eef33869-21cf-4762-8af8-558074c96f6c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.099249] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Created folder: Instances in parent group-v694728. [ 1731.099469] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1731.099650] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1731.099834] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7c84f4d3-ccbb-4fec-bf24-190fa8bfa746 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.115387] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1731.115387] env[69227]: value = "task-3475158" [ 1731.115387] env[69227]: _type = "Task" [ 1731.115387] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.122178] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475158, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.625826] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475158, 'name': CreateVM_Task, 'duration_secs': 0.251641} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.625990] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1731.626454] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.626622] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1731.626933] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1731.627218] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2964e54-7c1a-4157-8338-744583ee8370 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.631468] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for the task: (returnval){ [ 1731.631468] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5220cc7f-90a9-e6f2-6882-ad0ababab557" [ 1731.631468] env[69227]: _type = "Task" [ 1731.631468] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.638668] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5220cc7f-90a9-e6f2-6882-ad0ababab557, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.141655] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1732.142026] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1732.142165] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.988604] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1751.515689] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "5539b326-2f24-45b7-874a-edc484e82267" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1751.516023] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "5539b326-2f24-45b7-874a-edc484e82267" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1766.758472] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1766.758767] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1767.423163] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.427342] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.427738] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.427738] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1768.427882] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1769.427371] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1769.427755] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1769.931058] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1769.931337] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1769.931506] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1769.931663] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1769.932570] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8147d730-1167-4103-ac8d-22bc6e67ede8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.936027] env[69227]: WARNING oslo_vmware.rw_handles [None 
req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1769.936027] env[69227]: ERROR oslo_vmware.rw_handles [ 1769.936537] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1769.938724] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1769.938955] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Copying Virtual Disk [datastore2] vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/420217e4-c87b-47ce-bd65-707441e81101/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1769.939203] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4f27f0a-d650-46bf-81fd-a7121378ee02 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.946861] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b4c2eb-67b5-4e29-99a6-6b31ba5dc6d9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.951685] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed 
tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Waiting for the task: (returnval){ [ 1769.951685] env[69227]: value = "task-3475159" [ 1769.951685] env[69227]: _type = "Task" [ 1769.951685] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.963151] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b44669-da23-48f1-9c9c-dd1b63f35c6e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.967839] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Task: {'id': task-3475159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.971692] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbae086a-d279-4c33-8fad-ab1a587c8999 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.000382] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180960MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1770.000523] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1770.000728] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1770.462696] env[69227]: DEBUG oslo_vmware.exceptions [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1770.463076] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1770.463489] env[69227]: ERROR nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1770.463489] env[69227]: Faults: ['InvalidArgument'] [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Traceback (most recent call last): [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] yield resources [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self.driver.spawn(context, instance, image_meta, [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self._fetch_image_if_missing(context, vi) [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] image_cache(vi, tmp_image_ds_loc) [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] vm_util.copy_virtual_disk( [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] session._wait_for_task(vmdk_copy_task) [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] return self.wait_for_task(task_ref) [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] return evt.wait() [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] result = hub.switch() [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] return self.greenlet.switch() [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self.f(*self.args, **self.kw) [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] raise exceptions.translate_fault(task_info.error) [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Faults: ['InvalidArgument'] [ 1770.463489] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] [ 1770.464670] env[69227]: INFO nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Terminating instance [ 1770.465279] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1770.465490] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1770.465748] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f346e94b-0e37-4723-8933-dca02971ad6b {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.468087] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1770.468282] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1770.469013] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1a71a5-58c2-47a4-af64-c4489a197f55 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.475993] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1770.476250] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3b5bf1c4-2aa8-41ae-a4bd-32fcbd1e87bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.478544] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1770.478715] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1770.479668] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eb6776a-4fee-4061-b77b-a3f8dfc96e89 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.484231] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Waiting for the task: (returnval){ [ 1770.484231] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b135dd-eace-5d98-6732-9f33b4d34dc0" [ 1770.484231] env[69227]: _type = "Task" [ 1770.484231] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.493558] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b135dd-eace-5d98-6732-9f33b4d34dc0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.568208] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1770.568427] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1770.568615] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Deleting the datastore file [datastore2] af538b0d-b8c6-4f93-81e7-8f27b8a96735 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1770.568888] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fc60a2a-6c69-4bc9-a270-068a101263cf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.574775] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Waiting for the task: (returnval){ [ 1770.574775] env[69227]: value = "task-3475161" [ 1770.574775] env[69227]: _type = "Task" [ 1770.574775] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.582665] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Task: {'id': task-3475161, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.994818] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1770.995083] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Creating directory with path [datastore2] vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1770.995321] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3987d45-12c2-40a1-8ccb-26cf96dfc301 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.006014] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Created directory with path [datastore2] vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1771.006217] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Fetch image to [datastore2] vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1771.006387] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1771.007089] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56df2976-ae39-4e03-b35f-5420c689345d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.016485] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a7691ac-6a14-4bd4-a0ca-ea3bfd7d0c17 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.025702] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298ad48f-6913-4385-afe9-7323307a419d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.057530] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6593a746-9e4e-49a4-9b2b-284b35d904b2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.062783] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fd7724a5-a326-40d2-987f-5c6642e38904 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.083093] env[69227]: DEBUG oslo_vmware.api [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Task: {'id': task-3475161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073591} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.084490] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1771.084687] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1771.084859] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1771.085042] env[69227]: INFO nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Took 0.62 seconds to destroy the instance on the hypervisor. 
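The entries above follow oslo.vmware's task pattern: the driver invokes a vCenter task (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), then wait_for_task polls the task until it reaches a terminal state, translating the fault (here InvalidArgument on fileType) into an exception on error. A minimal sketch of that polling loop, assuming a pyVmomi vim.Task object rather than Nova's own session code (the function name and interval below are illustrative, not from the log):

import time

def wait_for_vsphere_task(task, poll_interval=0.5):
    # Poll task.info until the task finishes, mirroring the
    # wait_for_task/_poll_task entries seen in the log above.
    while True:
        state = task.info.state  # 'queued', 'running', 'success' or 'error'
        if state == 'success':
            return task.info.result
        if state == 'error':
            # task.info.error carries the fault, e.g. InvalidArgument for a bad fileType
            raise RuntimeError(task.info.error.localizedMessage)
        time.sleep(poll_interval)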
[ 1771.086706] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1771.088780] env[69227]: DEBUG nova.compute.claims [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1771.088942] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1771.108032] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108186] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108313] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108432] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108565] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108657] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108770] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108883] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.108992] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.109117] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1771.136593] env[69227]: DEBUG oslo_vmware.rw_handles [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1771.196367] env[69227]: DEBUG oslo_vmware.rw_handles [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1771.196575] env[69227]: DEBUG oslo_vmware.rw_handles [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1771.614796] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 598e8def-9627-4bd6-860b-50370c98b23b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1772.117685] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1772.620241] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.124449] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.627456] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1773.627863] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1773.627917] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1773.644330] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing inventories for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1773.659946] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating ProviderTree inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1773.660135] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1773.669960] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing aggregate associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, aggregates: None {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1773.686650] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing trait associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1773.848435] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e71e46-f151-48f6-a669-eec6a9dfe157 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.856307] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b6850e14-f106-4cd8-9d7f-e8ef817dc3a3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.885932] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b996515-b69d-4df4-864b-61e26f8b58e5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.892468] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c554273c-bc1b-4622-8f0c-fe3945cabecb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.904692] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1774.407525] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1774.913032] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1774.913032] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.912s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1774.913392] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 3.824s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1774.915826] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.915968] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11243}} [ 1775.422062] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] There are 0 instances to clean {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11252}} [ 1775.422303] env[69227]: DEBUG 
oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1775.422597] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances with incomplete migration {{(pid=69227) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11281}} [ 1775.581872] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3305620a-8bc7-4604-a575-5efa54bb3ee7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.589861] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b196ac-844d-4133-bf3e-ff99cfdce641 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.620484] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fe7f15-5de4-4063-8dbe-598f8de24033 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.627376] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfac93c2-7afd-43ea-95a5-a7809d83b9d0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.640341] env[69227]: DEBUG nova.compute.provider_tree [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1775.928406] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1776.143696] env[69227]: DEBUG nova.scheduler.client.report [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1776.649221] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.736s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1776.649796] env[69227]: ERROR nova.compute.manager [None 
req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1776.649796] env[69227]: Faults: ['InvalidArgument'] [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Traceback (most recent call last): [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self.driver.spawn(context, instance, image_meta, [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self._fetch_image_if_missing(context, vi) [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] image_cache(vi, tmp_image_ds_loc) [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] vm_util.copy_virtual_disk( [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] session._wait_for_task(vmdk_copy_task) [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] return self.wait_for_task(task_ref) [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] return evt.wait() [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] result = hub.switch() [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: 
af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] return self.greenlet.switch() [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] self.f(*self.args, **self.kw) [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] raise exceptions.translate_fault(task_info.error) [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Faults: ['InvalidArgument'] [ 1776.649796] env[69227]: ERROR nova.compute.manager [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] [ 1776.650877] env[69227]: DEBUG nova.compute.utils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1776.652268] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Build of instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 was re-scheduled: A specified parameter was not correct: fileType [ 1776.652268] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1776.652632] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1776.652811] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1776.652974] env[69227]: DEBUG nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1776.653149] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1777.431050] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1777.431050] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1777.431050] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1777.435739] env[69227]: DEBUG nova.network.neutron [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.934804] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.934954] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935100] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935227] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935352] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935471] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935692] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935827] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.935947] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1777.936123] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1777.937941] env[69227]: INFO nova.compute.manager [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Took 1.28 seconds to deallocate network for instance. 
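The resource tracker entries earlier in this run report the provider inventory for 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b as per-resource-class records of total, reserved, max_unit and allocation_ratio. Placement treats the schedulable capacity of each class as (total - reserved) * allocation_ratio; a small illustrative calculation over the values copied from those entries:

# Inventory values as reported in the log for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Capacity formula applied by placement: (total - reserved) * allocation_ratio
    usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: usable={usable}")
# Prints: VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0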
[ 1778.972484] env[69227]: INFO nova.scheduler.client.report [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Deleted allocations for instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 [ 1779.482070] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5a1a95ab-3b64-4204-bcd8-33dbce6839ed tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 562.266s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1779.483694] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 365.828s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1779.483951] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Acquiring lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1779.484176] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1779.484347] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1779.486755] env[69227]: INFO nova.compute.manager [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Terminating instance [ 1779.488753] env[69227]: DEBUG nova.compute.manager [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1779.488951] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1779.489221] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c81983c-967b-4dcb-b97f-c8b59a082a63 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.498633] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182804e3-d4fa-4f46-b0e1-ac5aec581008 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.527166] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance af538b0d-b8c6-4f93-81e7-8f27b8a96735 could not be found. [ 1779.527347] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1779.527522] env[69227]: INFO nova.compute.manager [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1779.527799] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1779.528027] env[69227]: DEBUG nova.compute.manager [-] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1779.528121] env[69227]: DEBUG nova.network.neutron [-] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1779.989692] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1780.044546] env[69227]: DEBUG nova.network.neutron [-] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.512286] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1780.512557] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1780.514088] env[69227]: INFO nova.compute.claims [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1780.547352] env[69227]: INFO nova.compute.manager [-] [instance: af538b0d-b8c6-4f93-81e7-8f27b8a96735] Took 1.02 seconds to deallocate network for instance. [ 1780.928073] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1781.570822] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87730611-22d2-4c73-a104-440d21c84947 tempest-ServerGroupTestJSON-1729656755 tempest-ServerGroupTestJSON-1729656755-project-member] Lock "af538b0d-b8c6-4f93-81e7-8f27b8a96735" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.087s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1781.674771] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49019859-7e6e-4ea4-b1a2-469e0a9fc060 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.682245] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16060319-7263-457e-89d8-ac4fa3c73e94 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.712345] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac955a0c-632f-4014-8ad0-4709a69526f3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.719091] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3f0ba4-147a-4209-8190-f20c1b648c66 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.732033] 
env[69227]: DEBUG nova.compute.provider_tree [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1782.235359] env[69227]: DEBUG nova.scheduler.client.report [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1782.740191] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.227s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1782.740590] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1783.245630] env[69227]: DEBUG nova.compute.utils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1783.247028] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1783.247199] env[69227]: DEBUG nova.network.neutron [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1783.282948] env[69227]: DEBUG nova.policy [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'dd4b9dd48d424728865893bd9c5ceaf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1c02b9ad136e446fa60e00bf4c91f22a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1783.617618] env[69227]: DEBUG nova.network.neutron [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Successfully created port: 17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1783.750097] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1784.255251] env[69227]: INFO nova.virt.block_device [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Booting with volume 27a99270-f0aa-4783-8535-bc3056edfc8e at /dev/sda [ 1784.299762] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-85f48102-55f4-4c06-9224-50d2721c5284 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.309681] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72ea5bc-ccca-41a0-9de5-6dfe5b6ff2d6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.337130] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dba187cf-abf6-47bb-936f-340b47512e36 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.344224] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba8f7b8-0240-4b6c-a9d5-b28d7b91a057 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.371166] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e9b691-9775-4713-8a5d-2ac613a462ac {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.376925] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d187cb-a257-4ca5-804f-2678af8752a9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.389526] env[69227]: DEBUG nova.virt.block_device [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updating existing volume attachment record: 24e7305e-a852-4252-aa1c-82c2a6c802c6 {{(pid=69227) _volume_attach /opt/stack/nova/nova/virt/block_device.py:665}} [ 1784.968203] env[69227]: DEBUG nova.compute.manager [req-a73d8a8d-3757-463a-a159-5a519b158069 req-a1dc4284-e70a-49ed-8212-61e1b19e8db0 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Received event network-vif-plugged-17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1784.968461] env[69227]: DEBUG oslo_concurrency.lockutils [req-a73d8a8d-3757-463a-a159-5a519b158069 req-a1dc4284-e70a-49ed-8212-61e1b19e8db0 service nova] Acquiring lock "598e8def-9627-4bd6-860b-50370c98b23b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1784.968797] env[69227]: DEBUG oslo_concurrency.lockutils [req-a73d8a8d-3757-463a-a159-5a519b158069 req-a1dc4284-e70a-49ed-8212-61e1b19e8db0 service nova] Lock "598e8def-9627-4bd6-860b-50370c98b23b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1784.968797] env[69227]: DEBUG oslo_concurrency.lockutils [req-a73d8a8d-3757-463a-a159-5a519b158069 req-a1dc4284-e70a-49ed-8212-61e1b19e8db0 service nova] Lock "598e8def-9627-4bd6-860b-50370c98b23b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1784.968984] env[69227]: DEBUG nova.compute.manager [req-a73d8a8d-3757-463a-a159-5a519b158069 req-a1dc4284-e70a-49ed-8212-61e1b19e8db0 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] No waiting events found dispatching network-vif-plugged-17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1784.969128] env[69227]: WARNING nova.compute.manager [req-a73d8a8d-3757-463a-a159-5a519b158069 req-a1dc4284-e70a-49ed-8212-61e1b19e8db0 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Received unexpected event network-vif-plugged-17d96a99-82d2-4f7e-b6d1-a67e016850b7 for instance with vm_state building and task_state block_device_mapping. [ 1785.055739] env[69227]: DEBUG nova.network.neutron [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Successfully updated port: 17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1785.559189] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquiring lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.559345] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquired lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1785.559502] env[69227]: DEBUG nova.network.neutron [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1786.093013] env[69227]: DEBUG nova.network.neutron [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1786.252550] env[69227]: DEBUG nova.network.neutron [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updating instance_info_cache with network_info: [{"id": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "address": "fa:16:3e:d3:30:15", "network": {"id": "1c0b82c9-02f9-4e21-8bf9-5126ce41e1ab", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1353040367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c02b9ad136e446fa60e00bf4c91f22a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d96a99-82", "ovs_interfaceid": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.489853] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1786.490413] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1786.490621] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1786.490773] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1786.490957] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1786.491116] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1786.491264] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1786.491462] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1786.491613] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1786.491774] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 
tempest-ServersTestBootFromVolume-1059047540-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1786.491935] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1786.492124] env[69227]: DEBUG nova.virt.hardware [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1786.492977] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b5dd69-e615-4d1a-aad6-6ec1d964b8e0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.500926] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92e0561-f43a-436b-b003-93f0427e2b82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.754977] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Releasing lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1786.755372] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Instance network_info: |[{"id": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "address": "fa:16:3e:d3:30:15", "network": {"id": "1c0b82c9-02f9-4e21-8bf9-5126ce41e1ab", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1353040367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c02b9ad136e446fa60e00bf4c91f22a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d96a99-82", "ovs_interfaceid": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1786.755920] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 
tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:30:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17d96a99-82d2-4f7e-b6d1-a67e016850b7', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1786.763288] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Creating folder: Project (1c02b9ad136e446fa60e00bf4c91f22a). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1786.763561] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c11f3d44-ca55-4637-a1c5-15cbebac054b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.778402] env[69227]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1786.778503] env[69227]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69227) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1786.779287] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Folder already exists: Project (1c02b9ad136e446fa60e00bf4c91f22a). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1786.779287] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Creating folder: Instances. Parent ref: group-v694715. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1786.779541] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec4a6605-ef8a-4aed-9a96-408e385008fe {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.788847] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Created folder: Instances in parent group-v694715. [ 1786.789079] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1786.789275] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1786.789483] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f68038c-9dde-4b7f-bff6-ffa8ecfacc29 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.808538] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1786.808538] env[69227]: value = "task-3475164" [ 1786.808538] env[69227]: _type = "Task" [ 1786.808538] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.815724] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475164, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.996249] env[69227]: DEBUG nova.compute.manager [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Received event network-changed-17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1786.996488] env[69227]: DEBUG nova.compute.manager [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Refreshing instance network info cache due to event network-changed-17d96a99-82d2-4f7e-b6d1-a67e016850b7. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1786.996743] env[69227]: DEBUG oslo_concurrency.lockutils [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] Acquiring lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.996921] env[69227]: DEBUG oslo_concurrency.lockutils [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] Acquired lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1786.997136] env[69227]: DEBUG nova.network.neutron [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Refreshing network info cache for port 17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1787.318292] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475164, 'name': CreateVM_Task, 'duration_secs': 0.30884} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.318601] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1787.319151] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-694718', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'name': 'volume-27a99270-f0aa-4783-8535-bc3056edfc8e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '598e8def-9627-4bd6-860b-50370c98b23b', 'attached_at': '', 'detached_at': '', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'serial': '27a99270-f0aa-4783-8535-bc3056edfc8e'}, 'boot_index': 0, 'attachment_id': '24e7305e-a852-4252-aa1c-82c2a6c802c6', 'disk_bus': None, 'mount_device': '/dev/sda', 'device_type': None, 'guest_format': None, 'delete_on_termination': True, 'volume_type': None}], 'swap': None} {{(pid=69227) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1787.319378] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Root volume attach. Driver type: vmdk {{(pid=69227) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1787.320148] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d653916-4040-41f9-b5d5-de5e2f5cdec4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.327744] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8584b3f4-5c14-44a0-acad-805e8641885a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.333503] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f466851c-4cd4-41b7-98fe-3df5612b85e4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.339332] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-3bb8224e-d346-477a-b67d-31c7c4818a4e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.346510] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1787.346510] env[69227]: value = "task-3475165" [ 1787.346510] env[69227]: _type = "Task" [ 1787.346510] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.353718] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.695825] env[69227]: DEBUG nova.network.neutron [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updated VIF entry in instance network info cache for port 17d96a99-82d2-4f7e-b6d1-a67e016850b7. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1787.696362] env[69227]: DEBUG nova.network.neutron [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updating instance_info_cache with network_info: [{"id": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "address": "fa:16:3e:d3:30:15", "network": {"id": "1c0b82c9-02f9-4e21-8bf9-5126ce41e1ab", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1353040367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c02b9ad136e446fa60e00bf4c91f22a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d96a99-82", "ovs_interfaceid": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1787.857970] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task} progress is 42%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.093318] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.199966] env[69227]: DEBUG oslo_concurrency.lockutils [req-cc371e45-63fd-4dbb-aebc-1ffe7d81331e req-ff87660c-d2d1-4141-b0ed-41d738465c6b service nova] Releasing lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1788.357375] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task} progress is 56%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.598196] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Getting list of instances from cluster (obj){ [ 1788.598196] env[69227]: value = "domain-c8" [ 1788.598196] env[69227]: _type = "ClusterComputeResource" [ 1788.598196] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1788.599352] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b570f827-79ab-4871-8f50-67ee609d43b4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.617024] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Got total of 10 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1788.617312] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.617545] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 20578860-89f7-4e25-9ccd-ccc39fa5e71f {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.617687] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.617903] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 1b975f6d-7e12-44cd-99c4-c480edc286bd {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.618068] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 1397d96c-8a1d-4940-9b58-148435f12497 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.618413] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid dcabb6a4-2b08-47df-8687-18431ee85153 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.618646] env[69227]: DEBUG nova.compute.manager 
[None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 1c3529ac-4abf-46fe-8b40-1e4222e2150a {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.618900] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 53fae914-75b0-414e-b3ce-9d8be3462039 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.619167] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 9944282c-d21a-40b2-9143-f76c288860ef {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.619518] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 598e8def-9627-4bd6-860b-50370c98b23b {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 1788.619997] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.620327] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.620565] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.620752] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.620954] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "1397d96c-8a1d-4940-9b58-148435f12497" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.621144] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "dcabb6a4-2b08-47df-8687-18431ee85153" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.621371] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.621567] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "53fae914-75b0-414e-b3ce-9d8be3462039" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.621785] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "9944282c-d21a-40b2-9143-f76c288860ef" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.621950] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "598e8def-9627-4bd6-860b-50370c98b23b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1788.858029] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task} progress is 71%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.358039] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task} progress is 86%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.857990] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task} progress is 97%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.357797] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475165, 'name': RelocateVM_Task, 'duration_secs': 2.990437} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.358105] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Volume attach. 
Driver type: vmdk {{(pid=69227) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1790.358340] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-694718', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'name': 'volume-27a99270-f0aa-4783-8535-bc3056edfc8e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '598e8def-9627-4bd6-860b-50370c98b23b', 'attached_at': '', 'detached_at': '', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'serial': '27a99270-f0aa-4783-8535-bc3056edfc8e'} {{(pid=69227) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1790.359075] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e37cef7-3594-4137-b098-c25c3501da01 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.374812] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c4312d-9ed2-4614-9f4d-812740aca7f2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.396497] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] volume-27a99270-f0aa-4783-8535-bc3056edfc8e/volume-27a99270-f0aa-4783-8535-bc3056edfc8e.vmdk or device None with type thin {{(pid=69227) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1790.396731] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-600e3904-08c5-415f-9684-c0b3f44f4a2e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.416122] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1790.416122] env[69227]: value = "task-3475166" [ 1790.416122] env[69227]: _type = "Task" [ 1790.416122] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.423598] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475166, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.925384] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475166, 'name': ReconfigVM_Task, 'duration_secs': 0.258972} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.925648] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Reconfigured VM instance instance-0000005d to attach disk [datastore2] volume-27a99270-f0aa-4783-8535-bc3056edfc8e/volume-27a99270-f0aa-4783-8535-bc3056edfc8e.vmdk or device None with type thin {{(pid=69227) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1790.930237] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2669eef3-7e63-47e9-9a77-7257aa291bfb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.944855] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1790.944855] env[69227]: value = "task-3475167" [ 1790.944855] env[69227]: _type = "Task" [ 1790.944855] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.953970] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.454904] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475167, 'name': ReconfigVM_Task, 'duration_secs': 0.142461} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.455200] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-694718', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'name': 'volume-27a99270-f0aa-4783-8535-bc3056edfc8e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '598e8def-9627-4bd6-860b-50370c98b23b', 'attached_at': '', 'detached_at': '', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'serial': '27a99270-f0aa-4783-8535-bc3056edfc8e'} {{(pid=69227) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1791.455782] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dc46585e-c95e-4e46-8a51-b716f02a617b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.462141] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1791.462141] env[69227]: value = "task-3475168" [ 1791.462141] env[69227]: _type = "Task" [ 1791.462141] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.469901] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475168, 'name': Rename_Task} progress is 5%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.972321] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475168, 'name': Rename_Task, 'duration_secs': 0.132428} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.972591] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Powering on the VM {{(pid=69227) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 1791.972837] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13d5beec-0c01-43d8-bb24-365fbe5ade12 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.979216] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1791.979216] env[69227]: value = "task-3475169" [ 1791.979216] env[69227]: _type = "Task" [ 1791.979216] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.986411] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.489558] env[69227]: DEBUG oslo_vmware.api [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475169, 'name': PowerOnVM_Task, 'duration_secs': 0.47786} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.489851] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Powered on the VM {{(pid=69227) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 1792.490891] env[69227]: INFO nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Took 6.00 seconds to spawn the instance on the hypervisor. [ 1792.490891] env[69227]: DEBUG nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Checking state {{(pid=69227) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1792.491201] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059d4435-0da5-4f61-b8df-820779c4ff2c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.010547] env[69227]: INFO nova.compute.manager [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Took 12.52 seconds to build instance. 
[ 1793.513353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-577692f9-9cdd-43b7-8ddf-5cf8b7ccafd7 tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "598e8def-9627-4bd6-860b-50370c98b23b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 155.361s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1793.514694] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "598e8def-9627-4bd6-860b-50370c98b23b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.893s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1793.514820] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1793.514997] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "598e8def-9627-4bd6-860b-50370c98b23b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1793.725927] env[69227]: DEBUG nova.compute.manager [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Received event network-changed-17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1793.726135] env[69227]: DEBUG nova.compute.manager [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Refreshing instance network info cache due to event network-changed-17d96a99-82d2-4f7e-b6d1-a67e016850b7. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1793.726360] env[69227]: DEBUG oslo_concurrency.lockutils [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] Acquiring lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.726478] env[69227]: DEBUG oslo_concurrency.lockutils [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] Acquired lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1793.726633] env[69227]: DEBUG nova.network.neutron [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Refreshing network info cache for port 17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1794.016225] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1794.539275] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1794.539571] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1794.541008] env[69227]: INFO nova.compute.claims [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1794.662376] env[69227]: DEBUG nova.network.neutron [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updated VIF entry in instance network info cache for port 17d96a99-82d2-4f7e-b6d1-a67e016850b7. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1794.662726] env[69227]: DEBUG nova.network.neutron [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updating instance_info_cache with network_info: [{"id": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "address": "fa:16:3e:d3:30:15", "network": {"id": "1c0b82c9-02f9-4e21-8bf9-5126ce41e1ab", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1353040367-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1c02b9ad136e446fa60e00bf4c91f22a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17d96a99-82", "ovs_interfaceid": "17d96a99-82d2-4f7e-b6d1-a67e016850b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.165370] env[69227]: DEBUG oslo_concurrency.lockutils [req-cc7d80d3-a56a-40ad-8594-9db4d692513e req-eaad21a1-e49b-4107-abb6-426ffe9e0331 service nova] Releasing lock "refresh_cache-598e8def-9627-4bd6-860b-50370c98b23b" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1795.723538] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28248070-45a1-4653-861e-059ae7f90d85 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.731140] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae332b9-51d8-41b3-813a-5cefb240a8b9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.761811] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381d88b3-eb57-4f79-a570-a822836da663 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.768152] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2527c3-b50b-425b-afbb-50cfdc75a247 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.780901] env[69227]: DEBUG nova.compute.provider_tree [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.284559] env[69227]: DEBUG nova.scheduler.client.report [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1796.790144] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1796.790739] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1797.296458] env[69227]: DEBUG nova.compute.utils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1797.298741] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Not allocating networking since 'none' was specified. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 1797.800680] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1798.810338] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1798.835950] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1798.836211] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1798.836368] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1798.836548] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1798.836692] env[69227]: DEBUG 
nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1798.836839] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1798.837057] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1798.837223] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1798.837389] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1798.837550] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1798.837723] env[69227]: DEBUG nova.virt.hardware [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1798.838618] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40dbd82c-4500-463f-b0fc-26acaf489d07 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.846209] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b88c7706-d061-403b-b795-1b13548fd64f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.858951] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance VIF info [] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1798.864519] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Creating folder: Project (70e67f1508e04b508b4404cde3650bf4). 
Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1798.864757] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2b98dff-dd91-4d5b-a264-904b384ce445 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.873303] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Created folder: Project (70e67f1508e04b508b4404cde3650bf4) in parent group-v694623. [ 1798.873481] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Creating folder: Instances. Parent ref: group-v694733. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1798.873680] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f6484ca-a20b-4d9b-9065-1146611033e8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.881399] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Created folder: Instances in parent group-v694733. [ 1798.881619] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1798.881795] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1798.881976] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ae54010-ee2d-4915-8640-edb01543914c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.898117] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1798.898117] env[69227]: value = "task-3475172" [ 1798.898117] env[69227]: _type = "Task" [ 1798.898117] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.905016] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475172, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.407636] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475172, 'name': CreateVM_Task, 'duration_secs': 0.247468} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.407833] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1799.408295] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.408460] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1799.408821] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1799.409040] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-660de051-ec83-4858-a346-f4458eb868e3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.413323] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for the task: (returnval){ [ 1799.413323] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]525a54b7-f8b0-aeb6-4549-e2ba2811a846" [ 1799.413323] env[69227]: _type = "Task" [ 1799.413323] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.420747] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]525a54b7-f8b0-aeb6-4549-e2ba2811a846, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.923370] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1799.923665] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1799.923834] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.114536] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "53fae914-75b0-414e-b3ce-9d8be3462039" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1808.367585] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "9944282c-d21a-40b2-9143-f76c288860ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1819.516433] env[69227]: WARNING oslo_vmware.rw_handles [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection 
without" [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1819.516433] env[69227]: ERROR oslo_vmware.rw_handles [ 1819.517339] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1819.519119] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1819.519350] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Copying Virtual Disk [datastore2] vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/8f0d4fc0-9c6d-4379-baa3-4f7ea1c2d8d7/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1819.519631] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-02a242da-6584-48da-9129-8ecf9a32f03b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.529078] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Waiting for the task: (returnval){ [ 1819.529078] env[69227]: value = "task-3475173" [ 1819.529078] env[69227]: _type = "Task" [ 1819.529078] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.536435] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Task: {'id': task-3475173, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.039796] env[69227]: DEBUG oslo_vmware.exceptions [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1820.040020] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1820.040624] env[69227]: ERROR nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1820.040624] env[69227]: Faults: ['InvalidArgument'] [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Traceback (most recent call last): [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] yield resources [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self.driver.spawn(context, instance, image_meta, [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self._fetch_image_if_missing(context, vi) [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] image_cache(vi, tmp_image_ds_loc) [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] vm_util.copy_virtual_disk( [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] session._wait_for_task(vmdk_copy_task) [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] return self.wait_for_task(task_ref) [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] return evt.wait() [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] result = hub.switch() [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] return self.greenlet.switch() [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self.f(*self.args, **self.kw) [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] raise exceptions.translate_fault(task_info.error) [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Faults: ['InvalidArgument'] [ 1820.040624] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] [ 1820.042346] env[69227]: INFO nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Terminating instance [ 1820.042487] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1820.042695] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1820.042932] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1645e15-dbf1-4442-9088-5117c90cfe5a 
{{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.046720] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1820.046913] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1820.047657] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ca08cd2-89c0-4762-aaeb-547d2aad78aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.051173] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1820.051347] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1820.052289] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed85663-4a44-46b4-9243-d40a8e101431 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.056075] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1820.056546] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fd80957f-9c68-4781-b6da-25cd1e2f720b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.058743] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Waiting for the task: (returnval){ [ 1820.058743] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]522b5b85-f261-0000-25a8-89704d057f2c" [ 1820.058743] env[69227]: _type = "Task" [ 1820.058743] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.065858] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]522b5b85-f261-0000-25a8-89704d057f2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.121818] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1820.122144] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1820.122406] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Deleting the datastore file [datastore2] cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1820.122745] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba966c4d-32c5-4352-8233-89ff6a43861f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.129311] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Waiting for the task: (returnval){ [ 1820.129311] env[69227]: value = "task-3475175" [ 1820.129311] env[69227]: _type = "Task" [ 1820.129311] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.136761] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Task: {'id': task-3475175, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.569964] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1820.570384] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Creating directory with path [datastore2] vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1820.570515] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85dbeb7b-50de-4884-8014-800d60ff61c8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.581899] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Created directory with path [datastore2] vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1820.582145] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Fetch image to [datastore2] vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1820.582267] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1820.582970] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0c60e9-a11e-4939-846e-26c963e0a841 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.589299] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e78f1c-0a7b-4e83-ab7d-57aa2a3a8f4e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.597962] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feff2ed-af97-4c6d-8ae1-0826dad07521 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.628576] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5328c1ae-6a89-46f9-86d2-c42e19ed24d7 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.346601] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-18450316-f80f-426a-a061-4c15d128faab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.348162] env[69227]: DEBUG oslo_vmware.api [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Task: {'id': task-3475175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085709} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.348306] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.348657] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1821.348657] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1821.348773] env[69227]: INFO nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Took 1.30 seconds to destroy the instance on the hypervisor. 
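The CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task entries above all follow the same shape: submit a vCenter task, then poll its state until it reports success, an error fault, or a timeout. The sketch below illustrates that poll-until-terminal loop; it is not the oslo.vmware implementation, and the TaskInfo container and the fetch_task_info() callable are hypothetical stand-ins for the real API.

    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str               # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None

    def wait_for_task(fetch_task_info, task_ref, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it reaches a terminal state."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_ref)   # hypothetical helper supplied by the caller
            if info.state == 'success':
                return info
            if info.state == 'error':
                raise RuntimeError(f"task {task_ref} failed: {info.error}")
            time.sleep(interval)               # back off between polls (the log shows roughly 0.5s gaps)
        raise TimeoutError(f"task {task_ref} did not finish in {timeout}s")

In the failing CopyVirtualDisk_Task case above, the loop ends on the error branch, and the fault text ("A specified parameter was not correct: fileType") is what surfaces as the VimFaultException.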
[ 1821.350808] env[69227]: DEBUG nova.compute.claims [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1821.350938] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1821.351177] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1821.368340] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1821.416943] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1821.477512] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1821.477708] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1821.955936] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1822.022088] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7b2f38e-96e5-4d34-8360-07dcd2bc6a3a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.031034] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77418d85-e6e7-4b19-b24f-a7d18cf8d5dd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.061185] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad5af02-7e9f-45d7-b47b-f8ebcd883993 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.069713] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525337b2-d578-40f6-ba68-3ebe28a391da {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.081524] env[69227]: DEBUG nova.compute.provider_tree [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.585594] env[69227]: DEBUG nova.scheduler.client.report [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1823.091379] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.740s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1823.091610] env[69227]: ERROR nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1823.091610] env[69227]: Faults: ['InvalidArgument'] [ 1823.091610] 
env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Traceback (most recent call last): [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self.driver.spawn(context, instance, image_meta, [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self._fetch_image_if_missing(context, vi) [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] image_cache(vi, tmp_image_ds_loc) [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] vm_util.copy_virtual_disk( [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] session._wait_for_task(vmdk_copy_task) [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] return self.wait_for_task(task_ref) [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] return evt.wait() [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] result = hub.switch() [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] return self.greenlet.switch() [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] self.f(*self.args, **self.kw) [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] raise exceptions.translate_fault(task_info.error) [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Faults: ['InvalidArgument'] [ 1823.091610] env[69227]: ERROR nova.compute.manager [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] [ 1823.092534] env[69227]: DEBUG nova.compute.utils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1823.094962] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Build of instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 was re-scheduled: A specified parameter was not correct: fileType [ 1823.094962] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1823.095353] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1823.095525] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1823.095690] env[69227]: DEBUG nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1823.095851] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1823.861332] env[69227]: DEBUG nova.network.neutron [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.364572] env[69227]: INFO nova.compute.manager [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Took 1.27 seconds to deallocate network for instance. [ 1825.402389] env[69227]: INFO nova.scheduler.client.report [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Deleted allocations for instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 [ 1825.910691] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d4e20840-7f46-49cc-9274-0ce330b214d3 tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 563.569s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1825.912573] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 367.228s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1825.912803] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Acquiring lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1825.913072] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1825.913667] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1825.915801] env[69227]: INFO nova.compute.manager [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Terminating instance [ 1825.917679] env[69227]: DEBUG nova.compute.manager [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1825.917917] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1825.918314] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-050c56ae-35d8-4523-96a9-59c5b92e9718 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.927523] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b16fd1-7688-48ce-a70b-4a1b34599cd3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.958033] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cc159ed8-ebf1-4c6d-8572-b78b48d9ea39 could not be found. [ 1825.958282] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1825.958469] env[69227]: INFO nova.compute.manager [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1825.958712] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1825.958936] env[69227]: DEBUG nova.compute.manager [-] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1825.959042] env[69227]: DEBUG nova.network.neutron [-] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1826.416593] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1826.426263] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1826.480597] env[69227]: DEBUG nova.network.neutron [-] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.948297] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1826.948683] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1826.950101] env[69227]: INFO nova.compute.claims [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1826.982950] env[69227]: INFO nova.compute.manager [-] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] Took 1.02 seconds to deallocate network for instance. 
[ 1828.014744] env[69227]: DEBUG oslo_concurrency.lockutils [None req-d06e3695-f8e6-46c5-94ea-b32aa5b195eb tempest-ListServersNegativeTestJSON-1172104691 tempest-ListServersNegativeTestJSON-1172104691-project-member] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.102s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1828.015654] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 39.396s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1828.015846] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc159ed8-ebf1-4c6d-8572-b78b48d9ea39] During sync_power_state the instance has a pending task (deleting). Skip. [ 1828.016056] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "cc159ed8-ebf1-4c6d-8572-b78b48d9ea39" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1828.188945] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025c65a1-f925-41e7-bbcd-117ab3636bb4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.195496] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6aad46-5687-417e-a20d-d9b4548e3a60 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.233573] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a47238-47f6-4475-bcdc-a033dfd21022 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.241687] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06b6dfe-6b88-4595-a6fe-323b88228119 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.254787] env[69227]: DEBUG nova.compute.provider_tree [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.757636] env[69227]: DEBUG nova.scheduler.client.report [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1829.112640] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1829.112640] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1829.264610] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.316s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1829.265142] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1829.422674] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.428063] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.428063] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.428063] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1829.428063] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1829.774170] env[69227]: DEBUG nova.compute.utils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1829.774170] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1829.774170] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1829.856708] env[69227]: DEBUG nova.policy [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ffb2e003bb4247aeb8a9c8ce9c7f13cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '991c407526cf4eb7abcde2911220437e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1830.278081] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1830.410763] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Successfully created port: f020640c-2f98-40dc-9355-37d11e8dab98 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1830.427114] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1830.427335] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1830.932243] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1830.932603] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1830.932603] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1830.932711] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1830.933657] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df8436e7-ea3e-43a8-a9d7-a26c9492dec6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.943353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "84d5494a-c08b-45be-a35a-860e64fdf76f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1830.943584] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1830.948295] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d59e5d-aecc-4563-af5c-f2870f8d716c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.964154] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9215e7e0-231c-41f1-b68a-6825f67a6b8f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.970875] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-605bb0e5-619a-4f4e-9003-a1a340a858ec {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.002839] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180737MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1831.003070] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1831.004284] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1831.289017] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1831.316206] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1831.316474] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1831.316794] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1831.316794] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1831.316946] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1831.317205] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1831.317434] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1831.317594] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1831.317901] env[69227]: DEBUG nova.virt.hardware [None 
req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1831.317954] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1831.318113] env[69227]: DEBUG nova.virt.hardware [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1831.318975] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3738ba4-ee0a-4f71-b1ce-8d0bf380cb73 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.327261] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbfbef96-e2d1-4f2c-ac65-33da0f787801 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.918473] env[69227]: DEBUG nova.compute.manager [req-4800c286-f8df-4192-82ab-2b0b09101f79 req-06d5d0bb-6624-42c4-bec4-a22baec2413a service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Received event network-vif-plugged-f020640c-2f98-40dc-9355-37d11e8dab98 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1831.918674] env[69227]: DEBUG oslo_concurrency.lockutils [req-4800c286-f8df-4192-82ab-2b0b09101f79 req-06d5d0bb-6624-42c4-bec4-a22baec2413a service nova] Acquiring lock "cc0035fc-3edc-457b-a798-afa4f9ea7071-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1831.918881] env[69227]: DEBUG oslo_concurrency.lockutils [req-4800c286-f8df-4192-82ab-2b0b09101f79 req-06d5d0bb-6624-42c4-bec4-a22baec2413a service nova] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1831.919380] env[69227]: DEBUG oslo_concurrency.lockutils [req-4800c286-f8df-4192-82ab-2b0b09101f79 req-06d5d0bb-6624-42c4-bec4-a22baec2413a service nova] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1831.919606] env[69227]: DEBUG nova.compute.manager [req-4800c286-f8df-4192-82ab-2b0b09101f79 req-06d5d0bb-6624-42c4-bec4-a22baec2413a service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] No waiting events found dispatching network-vif-plugged-f020640c-2f98-40dc-9355-37d11e8dab98 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1831.919779] env[69227]: WARNING nova.compute.manager [req-4800c286-f8df-4192-82ab-2b0b09101f79 
req-06d5d0bb-6624-42c4-bec4-a22baec2413a service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Received unexpected event network-vif-plugged-f020640c-2f98-40dc-9355-37d11e8dab98 for instance with vm_state building and task_state spawning. [ 1832.009631] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Successfully updated port: f020640c-2f98-40dc-9355-37d11e8dab98 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 598e8def-9627-4bd6-860b-50370c98b23b actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.040278] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1832.275734] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquiring lock "598e8def-9627-4bd6-860b-50370c98b23b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1832.276063] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "598e8def-9627-4bd6-860b-50370c98b23b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1832.276291] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquiring lock "598e8def-9627-4bd6-860b-50370c98b23b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1832.276481] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "598e8def-9627-4bd6-860b-50370c98b23b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1832.276652] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "598e8def-9627-4bd6-860b-50370c98b23b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1832.280752] env[69227]: INFO nova.compute.manager [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Terminating instance [ 1832.282841] env[69227]: DEBUG nova.compute.manager [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1832.283068] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Powering off the VM {{(pid=69227) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 1832.283310] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14ec7dbd-300b-4706-b847-4fc849a26ec8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.291803] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1832.291803] env[69227]: value = "task-3475176" [ 1832.291803] env[69227]: _type = "Task" [ 1832.291803] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.303791] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475176, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.512835] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "refresh_cache-cc0035fc-3edc-457b-a798-afa4f9ea7071" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.513720] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "refresh_cache-cc0035fc-3edc-457b-a798-afa4f9ea7071" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1832.513720] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1832.541997] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1832.801543] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475176, 'name': PowerOffVM_Task, 'duration_secs': 0.164683} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.801802] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Powered off the VM {{(pid=69227) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 1832.801992] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Volume detach. 
Driver type: vmdk {{(pid=69227) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1832.802198] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-694718', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'name': 'volume-27a99270-f0aa-4783-8535-bc3056edfc8e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '598e8def-9627-4bd6-860b-50370c98b23b', 'attached_at': '', 'detached_at': '', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'serial': '27a99270-f0aa-4783-8535-bc3056edfc8e'} {{(pid=69227) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1832.802939] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa413e72-b34e-4499-a677-952e733a78da {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.821621] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d2a6477-7cb4-42e0-9dc7-d5f1fdc85562 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.828968] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89a6eb1-e016-4f54-a898-65ce56adb67b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.846466] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f44b54d-1112-4bee-b17c-6c0992b8d087 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.860569] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] The volume has not been displaced from its original location: [datastore2] volume-27a99270-f0aa-4783-8535-bc3056edfc8e/volume-27a99270-f0aa-4783-8535-bc3056edfc8e.vmdk. No consolidation needed. 
{{(pid=69227) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1832.865827] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Reconfiguring VM instance instance-0000005d to detach disk 2000 {{(pid=69227) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1832.866097] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9bc0edb-ab7f-4bfd-b89a-1ef6d6405a90 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.883470] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1832.883470] env[69227]: value = "task-3475177" [ 1832.883470] env[69227]: _type = "Task" [ 1832.883470] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.891610] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475177, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.044533] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1833.069413] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1833.264276] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Updating instance_info_cache with network_info: [{"id": "f020640c-2f98-40dc-9355-37d11e8dab98", "address": "fa:16:3e:a0:0f:9b", "network": {"id": "dcee9cd9-fefe-4c4c-8275-56d50b0ee143", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1722961603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "991c407526cf4eb7abcde2911220437e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf020640c-2f", "ovs_interfaceid": "f020640c-2f98-40dc-9355-37d11e8dab98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.393477] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475177, 'name': ReconfigVM_Task, 'duration_secs': 0.152016} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.393791] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Reconfigured VM instance instance-0000005d to detach disk 2000 {{(pid=69227) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1833.398613] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9d7d84a-4132-4941-b6f7-05d26d285a31 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.414281] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the task: (returnval){ [ 1833.414281] env[69227]: value = "task-3475178" [ 1833.414281] env[69227]: _type = "Task" [ 1833.414281] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.426779] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475178, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.547335] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1833.766666] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "refresh_cache-cc0035fc-3edc-457b-a798-afa4f9ea7071" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1833.766990] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance network_info: |[{"id": "f020640c-2f98-40dc-9355-37d11e8dab98", "address": "fa:16:3e:a0:0f:9b", "network": {"id": "dcee9cd9-fefe-4c4c-8275-56d50b0ee143", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1722961603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "991c407526cf4eb7abcde2911220437e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf020640c-2f", "ovs_interfaceid": "f020640c-2f98-40dc-9355-37d11e8dab98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1833.767449] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:0f:9b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ace50835-5731-4c77-b6c0-3076d7b4aa21', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f020640c-2f98-40dc-9355-37d11e8dab98', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1833.774702] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1833.774903] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1833.775130] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2771359f-85bf-4aa7-87ab-29b3406cc754 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.795138] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1833.795138] env[69227]: value = "task-3475179" [ 1833.795138] env[69227]: _type = "Task" [ 1833.795138] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.804898] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475179, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.924848] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475178, 'name': ReconfigVM_Task, 'duration_secs': 0.157084} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.924848] env[69227]: DEBUG nova.virt.vmwareapi.volumeops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-694718', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'name': 'volume-27a99270-f0aa-4783-8535-bc3056edfc8e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '598e8def-9627-4bd6-860b-50370c98b23b', 'attached_at': '', 'detached_at': '', 'volume_id': '27a99270-f0aa-4783-8535-bc3056edfc8e', 'serial': '27a99270-f0aa-4783-8535-bc3056edfc8e'} {{(pid=69227) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1833.925013] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1833.925732] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b2051f-fa93-49b4-b839-f21b24228c1a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.932471] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1833.932817] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-854a3401-578f-457f-a0d4-9f2b8baace79 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.945988] env[69227]: DEBUG nova.compute.manager [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Received event network-changed-f020640c-2f98-40dc-9355-37d11e8dab98 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1833.946223] env[69227]: DEBUG nova.compute.manager [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Refreshing instance network info cache due to event network-changed-f020640c-2f98-40dc-9355-37d11e8dab98. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1833.946465] env[69227]: DEBUG oslo_concurrency.lockutils [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] Acquiring lock "refresh_cache-cc0035fc-3edc-457b-a798-afa4f9ea7071" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.946620] env[69227]: DEBUG oslo_concurrency.lockutils [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] Acquired lock "refresh_cache-cc0035fc-3edc-457b-a798-afa4f9ea7071" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1833.946801] env[69227]: DEBUG nova.network.neutron [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Refreshing network info cache for port f020640c-2f98-40dc-9355-37d11e8dab98 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1833.990040] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1833.990892] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1833.990892] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Deleting the datastore file [datastore2] 598e8def-9627-4bd6-860b-50370c98b23b {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1833.990892] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9edf89d-868e-4991-8c6d-26c26802bc90 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.996945] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for the 
task: (returnval){ [ 1833.996945] env[69227]: value = "task-3475181" [ 1833.996945] env[69227]: _type = "Task" [ 1833.996945] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.005133] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475181, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.053643] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1834.053643] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1834.053643] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1834.247317] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38496b8-d632-46c6-b408-e337a8e5d398 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.256097] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11c75a8-9872-4005-8935-18d87d9d3bf7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.285522] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e804a0-a55b-4131-aac3-36f0b2b9d1bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.293042] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784f05da-134c-4c7f-babe-de0585481480 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.307178] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475179, 'name': CreateVM_Task, 'duration_secs': 0.320653} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.314542] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1834.315026] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.316774] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.317046] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1834.317515] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1834.318092] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f072b36-1d7a-4f38-b64a-555a17c2d19c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.322766] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 1834.322766] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5295b496-5bab-210e-528a-69912893a801" [ 1834.322766] env[69227]: _type = "Task" [ 1834.322766] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.331768] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5295b496-5bab-210e-528a-69912893a801, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.506106] env[69227]: DEBUG oslo_vmware.api [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Task: {'id': task-3475181, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089549} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.506394] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1834.506582] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1834.506755] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1834.506922] env[69227]: INFO nova.compute.manager [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Took 2.22 seconds to destroy the instance on the hypervisor. [ 1834.507174] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1834.507360] env[69227]: DEBUG nova.compute.manager [-] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1834.507451] env[69227]: DEBUG nova.network.neutron [-] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1834.790008] env[69227]: DEBUG nova.network.neutron [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Updated VIF entry in instance network info cache for port f020640c-2f98-40dc-9355-37d11e8dab98. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1834.790384] env[69227]: DEBUG nova.network.neutron [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Updating instance_info_cache with network_info: [{"id": "f020640c-2f98-40dc-9355-37d11e8dab98", "address": "fa:16:3e:a0:0f:9b", "network": {"id": "dcee9cd9-fefe-4c4c-8275-56d50b0ee143", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1722961603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "991c407526cf4eb7abcde2911220437e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ace50835-5731-4c77-b6c0-3076d7b4aa21", "external-id": "nsx-vlan-transportzone-270", "segmentation_id": 270, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf020640c-2f", "ovs_interfaceid": "f020640c-2f98-40dc-9355-37d11e8dab98", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.819013] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1834.834427] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1834.834799] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1834.835031] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.021187] env[69227]: DEBUG 
nova.compute.manager [req-8be1405f-f46b-4135-8262-68ad28e7132c req-fa2c253e-9b3c-411d-b506-8c3ec15b9001 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Received event network-vif-deleted-17d96a99-82d2-4f7e-b6d1-a67e016850b7 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1835.021187] env[69227]: INFO nova.compute.manager [req-8be1405f-f46b-4135-8262-68ad28e7132c req-fa2c253e-9b3c-411d-b506-8c3ec15b9001 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Neutron deleted interface 17d96a99-82d2-4f7e-b6d1-a67e016850b7; detaching it from the instance and deleting it from the info cache [ 1835.021187] env[69227]: DEBUG nova.network.neutron [req-8be1405f-f46b-4135-8262-68ad28e7132c req-fa2c253e-9b3c-411d-b506-8c3ec15b9001 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.293184] env[69227]: DEBUG oslo_concurrency.lockutils [req-c1396f88-5f69-4c3b-83c0-c1d37b5bfcd0 req-1cf087e4-0d3d-4262-8db4-0d516047f726 service nova] Releasing lock "refresh_cache-cc0035fc-3edc-457b-a798-afa4f9ea7071" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1835.324427] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1835.324427] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.321s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1835.503115] env[69227]: DEBUG nova.network.neutron [-] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.523901] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e2b1604a-c039-4dc4-a3bc-76c5d7aeaefd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.535243] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8797fab-c13d-4f96-8881-9ec475671ccf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.575336] env[69227]: DEBUG nova.compute.manager [req-8be1405f-f46b-4135-8262-68ad28e7132c req-fa2c253e-9b3c-411d-b506-8c3ec15b9001 service nova] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Detach interface failed, port_id=17d96a99-82d2-4f7e-b6d1-a67e016850b7, reason: Instance 598e8def-9627-4bd6-860b-50370c98b23b could not be found. {{(pid=69227) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10980}} [ 1836.004444] env[69227]: INFO nova.compute.manager [-] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Took 1.50 seconds to deallocate network for instance. 
[ 1836.324267] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.325031] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1836.325031] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1836.567890] env[69227]: INFO nova.compute.manager [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Took 0.56 seconds to detach 1 volumes for instance. [ 1836.570217] env[69227]: DEBUG nova.compute.manager [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Deleting volume: 27a99270-f0aa-4783-8535-bc3056edfc8e {{(pid=69227) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 1836.831384] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.831535] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.831641] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.831765] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.831887] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.832016] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.832135] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.832251] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.832370] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Skipping network cache update for instance because it is being deleted. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9960}} [ 1836.832485] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.832599] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1836.832717] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1837.109438] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1837.109787] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1837.109971] env[69227]: DEBUG nova.objects.instance [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lazy-loading 'resources' on Instance uuid 598e8def-9627-4bd6-860b-50370c98b23b {{(pid=69227) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1837.838806] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-944583d2-1d42-41b4-a80e-05cd97deaaa9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.846530] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e537557-3ea5-4c9e-b977-c4c2d2ec55a5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.876967] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9271b0d-825d-482c-b780-6e5f55c7e382 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.884313] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f20ddb-93ee-451d-8c77-5e1d047d44a6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.898232] env[69227]: DEBUG nova.compute.provider_tree [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.402395] env[69227]: DEBUG nova.scheduler.client.report [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 
1838.910944] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.801s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1838.933670] env[69227]: INFO nova.scheduler.client.report [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Deleted allocations for instance 598e8def-9627-4bd6-860b-50370c98b23b [ 1839.442000] env[69227]: DEBUG oslo_concurrency.lockutils [None req-95994992-b056-484e-80dc-0a84ee5e059a tempest-ServersTestBootFromVolume-1059047540 tempest-ServersTestBootFromVolume-1059047540-project-member] Lock "598e8def-9627-4bd6-860b-50370c98b23b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.166s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1851.784475] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1869.535234] env[69227]: WARNING oslo_vmware.rw_handles [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1869.535234] env[69227]: ERROR oslo_vmware.rw_handles [ 1869.535810] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to 
vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1869.537380] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1869.537653] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Copying Virtual Disk [datastore2] vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/48c6f5a7-725c-4b15-8378-d8bd3ecf4305/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1869.537951] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ea6352a7-079a-475d-a1e6-a89e5fa3f0bb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.547489] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Waiting for the task: (returnval){ [ 1869.547489] env[69227]: value = "task-3475183" [ 1869.547489] env[69227]: _type = "Task" [ 1869.547489] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.555555] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Task: {'id': task-3475183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.057625] env[69227]: DEBUG oslo_vmware.exceptions [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1870.057928] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1870.058537] env[69227]: ERROR nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1870.058537] env[69227]: Faults: ['InvalidArgument'] [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Traceback (most recent call last): [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] yield resources [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self.driver.spawn(context, instance, image_meta, [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self._fetch_image_if_missing(context, vi) [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] image_cache(vi, tmp_image_ds_loc) [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] vm_util.copy_virtual_disk( [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] session._wait_for_task(vmdk_copy_task) [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] return self.wait_for_task(task_ref) [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] return evt.wait() [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] result = hub.switch() [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] return self.greenlet.switch() [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self.f(*self.args, **self.kw) [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] raise exceptions.translate_fault(task_info.error) [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Faults: ['InvalidArgument'] [ 1870.058537] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] [ 1870.059447] env[69227]: INFO nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Terminating instance [ 1870.060400] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1870.060601] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1870.060851] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3d4c946-5de7-47a7-8df0-3fb84c398f50 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.062927] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.063061] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquired lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1870.063233] env[69227]: DEBUG nova.network.neutron [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1870.070349] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1870.070528] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1870.071711] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2590f76e-3794-4013-a2b0-6fc627bf4bd9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.077195] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Waiting for the task: (returnval){ [ 1870.077195] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52031924-7a38-505d-5807-26428073a704" [ 1870.077195] env[69227]: _type = "Task" [ 1870.077195] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.084665] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52031924-7a38-505d-5807-26428073a704, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.583385] env[69227]: DEBUG nova.network.neutron [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1870.590955] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1870.591619] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Creating directory with path [datastore2] vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1870.591619] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8856a0da-264f-4a1e-83cd-609385eabe32 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.612467] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Created directory with path [datastore2] vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1870.612669] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Fetch image to [datastore2] vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1870.612889] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1870.613631] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca5676ce-d3cd-423b-86e0-aafe8635c163 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.620520] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780ede6d-df85-4230-b4d0-809f21fbbc9c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.629483] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a50430-8d08-4025-8fda-cdc8eae85174 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.633621] env[69227]: DEBUG nova.network.neutron [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 
tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.663396] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9660ddfe-101d-46be-a852-a9d7ac67b102 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.670211] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7b945fde-93d0-4cd3-9fc3-e4bfd38daedc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.691510] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1870.741946] env[69227]: DEBUG oslo_vmware.rw_handles [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1870.802814] env[69227]: DEBUG oslo_vmware.rw_handles [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1870.803039] env[69227]: DEBUG oslo_vmware.rw_handles [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1871.136733] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Releasing lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1871.137195] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1871.137392] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1871.138297] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52d4f2ea-92bb-483b-9b67-38be6d8828aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.146101] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1871.146334] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-216b7bee-b309-437b-a7a0-49e853e1f843 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.175830] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1871.176043] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1871.176288] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Deleting the datastore file [datastore2] 20578860-89f7-4e25-9ccd-ccc39fa5e71f {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1871.176557] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0caea768-f780-496c-a107-cfcbb69e4428 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.182574] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Waiting for the task: (returnval){ [ 1871.182574] env[69227]: value = "task-3475185" [ 1871.182574] env[69227]: _type = "Task" [ 1871.182574] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.190126] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Task: {'id': task-3475185, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.692263] env[69227]: DEBUG oslo_vmware.api [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Task: {'id': task-3475185, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.039517} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.692647] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1871.692723] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1871.692849] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1871.693037] env[69227]: INFO nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1871.693282] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1871.693487] env[69227]: DEBUG nova.compute.manager [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1871.695485] env[69227]: DEBUG nova.compute.claims [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1871.695663] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1871.695937] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1872.345976] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefddd17-ebdd-44e4-b64f-57fb509253d2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.353261] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe9510c-2a25-42ec-9c96-fc6f4647a888 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.383029] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6fbc3e4-e5a1-4d4f-a832-3496bd2cbdc0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.390334] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4544c6c-3d90-438f-b1f4-dfacea268802 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.403739] env[69227]: DEBUG nova.compute.provider_tree [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.907011] env[69227]: DEBUG nova.scheduler.client.report [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1873.412343] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.716s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1873.413215] env[69227]: ERROR nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1873.413215] env[69227]: Faults: ['InvalidArgument'] [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Traceback (most recent call last): [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self.driver.spawn(context, instance, image_meta, [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self._fetch_image_if_missing(context, vi) [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] image_cache(vi, tmp_image_ds_loc) [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] vm_util.copy_virtual_disk( [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] session._wait_for_task(vmdk_copy_task) [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] return self.wait_for_task(task_ref) [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] return 
evt.wait() [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] result = hub.switch() [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] return self.greenlet.switch() [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] self.f(*self.args, **self.kw) [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] raise exceptions.translate_fault(task_info.error) [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Faults: ['InvalidArgument'] [ 1873.413215] env[69227]: ERROR nova.compute.manager [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] [ 1873.414268] env[69227]: DEBUG nova.compute.utils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1873.416751] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Build of instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f was re-scheduled: A specified parameter was not correct: fileType [ 1873.416751] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1873.417315] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1873.417664] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.417926] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 
tempest-ServerShowV254Test-1862314207-project-member] Acquired lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1873.418196] env[69227]: DEBUG nova.network.neutron [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1873.937924] env[69227]: DEBUG nova.network.neutron [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1873.983932] env[69227]: DEBUG nova.network.neutron [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.486819] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Releasing lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1874.487085] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1874.487278] env[69227]: DEBUG nova.compute.manager [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 1875.515915] env[69227]: INFO nova.scheduler.client.report [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Deleted allocations for instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f [ 1876.025042] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ccdb001a-fdae-48ab-99d2-3661216876cf tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 577.046s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1876.025674] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 380.655s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1876.025904] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1876.026122] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1876.026290] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1876.028589] env[69227]: INFO nova.compute.manager [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Terminating instance [ 1876.030210] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquiring lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.030360] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Acquired lock 
"refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1876.030523] env[69227]: DEBUG nova.network.neutron [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1876.529877] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1876.630556] env[69227]: DEBUG nova.network.neutron [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1876.681719] env[69227]: DEBUG nova.network.neutron [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.050597] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1877.050893] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1877.052325] env[69227]: INFO nova.compute.claims [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1877.184198] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Releasing lock "refresh_cache-20578860-89f7-4e25-9ccd-ccc39fa5e71f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1877.184630] env[69227]: DEBUG nova.compute.manager [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1877.184825] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1877.185140] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-98970443-580d-47b0-a575-16f52bb23905 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.194075] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f8cf34-cc73-487b-b97b-dafeb6143724 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.223562] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 20578860-89f7-4e25-9ccd-ccc39fa5e71f could not be found. [ 1877.223778] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1877.223986] env[69227]: INFO nova.compute.manager [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1877.224247] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1877.224479] env[69227]: DEBUG nova.compute.manager [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1877.224576] env[69227]: DEBUG nova.network.neutron [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1877.240638] env[69227]: DEBUG nova.network.neutron [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1877.744056] env[69227]: DEBUG nova.network.neutron [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.196752] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2ccfc6-ca90-4101-a4e7-c5b4ab1b90e3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.203981] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ad3a58-4430-4182-95db-710ea8acf1fc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.233590] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c7dd54-51fc-471e-9519-80817a61f277 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.240456] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af35908-64c1-403e-ad9a-4820ac6e4cac {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.245717] env[69227]: INFO nova.compute.manager [-] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] Took 1.02 seconds to deallocate network for instance. [ 1878.254388] env[69227]: DEBUG nova.compute.provider_tree [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1878.760073] env[69227]: DEBUG nova.scheduler.client.report [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1879.267662] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.217s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1879.268192] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Start building networks asynchronously for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1879.280637] env[69227]: DEBUG oslo_concurrency.lockutils [None req-9da98dcb-ff2c-4ac9-bb0b-83a405df84a8 tempest-ServerShowV254Test-1862314207 tempest-ServerShowV254Test-1862314207-project-member] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.255s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1879.281500] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 90.661s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1879.281704] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 20578860-89f7-4e25-9ccd-ccc39fa5e71f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1879.281881] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "20578860-89f7-4e25-9ccd-ccc39fa5e71f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1879.773680] env[69227]: DEBUG nova.compute.utils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1879.775152] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1879.775271] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1879.811701] env[69227]: DEBUG nova.policy [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e3fc498803e0404caacac2885c584816', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a300c84c11744acd8da2d95e98ad5654', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1880.097874] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Successfully created port: 26553feb-8bd7-4dd8-8713-c07fd83b3a3c {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1880.278786] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1881.287698] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1881.311932] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1881.312198] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1881.312355] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1881.312536] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1881.312682] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1881.312829] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1881.313463] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1881.313463] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1881.313463] env[69227]: DEBUG 
nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1881.313613] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1881.313656] env[69227]: DEBUG nova.virt.hardware [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1881.314529] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb12c370-e0d0-49f8-8c7c-b72055c2dd8c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.322286] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4250023-f1e4-4fe0-b342-8a5e91e761a8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.491946] env[69227]: DEBUG nova.compute.manager [req-23a70215-bfa1-4593-a63a-64259a45af94 req-f457c482-cc2a-4e62-97c1-b647ee67d8ea service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Received event network-vif-plugged-26553feb-8bd7-4dd8-8713-c07fd83b3a3c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1881.492195] env[69227]: DEBUG oslo_concurrency.lockutils [req-23a70215-bfa1-4593-a63a-64259a45af94 req-f457c482-cc2a-4e62-97c1-b647ee67d8ea service nova] Acquiring lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1881.492399] env[69227]: DEBUG oslo_concurrency.lockutils [req-23a70215-bfa1-4593-a63a-64259a45af94 req-f457c482-cc2a-4e62-97c1-b647ee67d8ea service nova] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1881.492563] env[69227]: DEBUG oslo_concurrency.lockutils [req-23a70215-bfa1-4593-a63a-64259a45af94 req-f457c482-cc2a-4e62-97c1-b647ee67d8ea service nova] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1881.492729] env[69227]: DEBUG nova.compute.manager [req-23a70215-bfa1-4593-a63a-64259a45af94 req-f457c482-cc2a-4e62-97c1-b647ee67d8ea service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] No waiting events found dispatching network-vif-plugged-26553feb-8bd7-4dd8-8713-c07fd83b3a3c {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1881.492892] env[69227]: WARNING nova.compute.manager 
[req-23a70215-bfa1-4593-a63a-64259a45af94 req-f457c482-cc2a-4e62-97c1-b647ee67d8ea service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Received unexpected event network-vif-plugged-26553feb-8bd7-4dd8-8713-c07fd83b3a3c for instance with vm_state building and task_state spawning. [ 1881.573364] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Successfully updated port: 26553feb-8bd7-4dd8-8713-c07fd83b3a3c {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1882.075448] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "refresh_cache-8ed695cd-8c17-43e0-ba42-081f2aecd8c2" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.075658] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquired lock "refresh_cache-8ed695cd-8c17-43e0-ba42-081f2aecd8c2" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1882.075838] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1882.607872] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1882.731961] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Updating instance_info_cache with network_info: [{"id": "26553feb-8bd7-4dd8-8713-c07fd83b3a3c", "address": "fa:16:3e:fd:4b:71", "network": {"id": "a42001e4-a50b-4011-b0ff-b403ebcc96c2", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1404947175-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a300c84c11744acd8da2d95e98ad5654", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26553feb-8b", "ovs_interfaceid": "26553feb-8bd7-4dd8-8713-c07fd83b3a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.234313] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Releasing lock "refresh_cache-8ed695cd-8c17-43e0-ba42-081f2aecd8c2" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1883.234643] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance network_info: |[{"id": "26553feb-8bd7-4dd8-8713-c07fd83b3a3c", "address": "fa:16:3e:fd:4b:71", "network": {"id": "a42001e4-a50b-4011-b0ff-b403ebcc96c2", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1404947175-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a300c84c11744acd8da2d95e98ad5654", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26553feb-8b", "ovs_interfaceid": "26553feb-8bd7-4dd8-8713-c07fd83b3a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1883.235105] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fd:4b:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7874ee7f-20c7-4bd8-a750-ed489e9acc65', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '26553feb-8bd7-4dd8-8713-c07fd83b3a3c', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1883.242779] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Creating folder: Project (a300c84c11744acd8da2d95e98ad5654). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1883.243139] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0133a97e-39a6-460d-bf2f-e1d84b3871bd {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.254848] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Created folder: Project (a300c84c11744acd8da2d95e98ad5654) in parent group-v694623. [ 1883.254999] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Creating folder: Instances. Parent ref: group-v694737. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1883.255235] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09126a71-62c1-439c-a78f-d55490175351 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.263506] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Created folder: Instances in parent group-v694737. [ 1883.263719] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1883.263893] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1883.264091] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5640e51d-923e-471a-a89a-6524f918f817 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.282551] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1883.282551] env[69227]: value = "task-3475188" [ 1883.282551] env[69227]: _type = "Task" [ 1883.282551] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.289377] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475188, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.427277] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1883.516880] env[69227]: DEBUG nova.compute.manager [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Received event network-changed-26553feb-8bd7-4dd8-8713-c07fd83b3a3c {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1883.517143] env[69227]: DEBUG nova.compute.manager [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Refreshing instance network info cache due to event network-changed-26553feb-8bd7-4dd8-8713-c07fd83b3a3c. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1883.517371] env[69227]: DEBUG oslo_concurrency.lockutils [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] Acquiring lock "refresh_cache-8ed695cd-8c17-43e0-ba42-081f2aecd8c2" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.517513] env[69227]: DEBUG oslo_concurrency.lockutils [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] Acquired lock "refresh_cache-8ed695cd-8c17-43e0-ba42-081f2aecd8c2" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1883.517674] env[69227]: DEBUG nova.network.neutron [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Refreshing network info cache for port 26553feb-8bd7-4dd8-8713-c07fd83b3a3c {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1883.793413] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475188, 'name': CreateVM_Task, 'duration_secs': 0.292508} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.793696] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1883.800382] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.800560] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1883.800856] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1883.801123] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-320d85f0-fda5-4ada-a428-2b573440e3d5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.805134] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Waiting for the task: (returnval){ [ 1883.805134] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]522d0e7c-a646-63a4-240f-55eaa4fe5559" [ 1883.805134] env[69227]: _type = "Task" [ 1883.805134] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.812840] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]522d0e7c-a646-63a4-240f-55eaa4fe5559, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.283262] env[69227]: DEBUG nova.network.neutron [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Updated VIF entry in instance network info cache for port 26553feb-8bd7-4dd8-8713-c07fd83b3a3c. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1884.283628] env[69227]: DEBUG nova.network.neutron [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Updating instance_info_cache with network_info: [{"id": "26553feb-8bd7-4dd8-8713-c07fd83b3a3c", "address": "fa:16:3e:fd:4b:71", "network": {"id": "a42001e4-a50b-4011-b0ff-b403ebcc96c2", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1404947175-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a300c84c11744acd8da2d95e98ad5654", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7874ee7f-20c7-4bd8-a750-ed489e9acc65", "external-id": "nsx-vlan-transportzone-753", "segmentation_id": 753, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap26553feb-8b", "ovs_interfaceid": "26553feb-8bd7-4dd8-8713-c07fd83b3a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.315486] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1884.315743] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1884.315952] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.789626] env[69227]: DEBUG oslo_concurrency.lockutils [req-1ca7d821-fe81-4fc0-95ad-0cb0d1884634 req-36060caf-05c0-4c15-9509-fc6e56ab5552 service nova] Releasing lock "refresh_cache-8ed695cd-8c17-43e0-ba42-081f2aecd8c2" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1887.050788] env[69227]: DEBUG oslo_concurrency.lockutils [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "cc0035fc-3edc-457b-a798-afa4f9ea7071" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1888.427032] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1889.426878] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1889.427079] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1890.166439] env[69227]: DEBUG oslo_concurrency.lockutils [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1890.427338] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.427621] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.427746] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.931060] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1890.931330] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1890.931504] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1890.931662] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) 
{{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1890.932561] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff5c4e1-f7de-4d0c-a592-aa2c32bbf86c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.940687] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288378d7-8df9-4cb5-98de-ca6f19117181 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.956038] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af06a6d-d202-41ad-a7c0-1347fddb7fd9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.961941] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a4121a-cfcb-47bc-ad40-d1096f739529 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.990506] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180975MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1890.990506] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1890.990506] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1892.020404] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.020643] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.020682] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.020795] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.020912] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.021041] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.021160] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.021276] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.021392] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.021504] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1892.524889] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1893.028425] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1893.531595] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1893.531854] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1893.532016] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1893.674028] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c831744-9253-4d0e-8f59-a78141a9ecfc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.681502] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4e8cea-8f84-4660-a060-d1d8f67b4809 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.711931] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8361ab4-da8e-4699-ac67-ff10db303f58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.718556] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ff246b-22a0-4ded-915c-4d314f577e5b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.731221] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1894.234740] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1894.742705] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1894.742912] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.752s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1895.737862] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.738169] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.738277] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1895.738399] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1896.243867] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244051] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244188] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244315] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244439] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244560] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244679] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244796] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.244914] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.245041] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1896.245164] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1896.245379] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1903.930627] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1919.578370] env[69227]: WARNING oslo_vmware.rw_handles [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1919.578370] env[69227]: ERROR oslo_vmware.rw_handles [ 1919.579065] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1919.580801] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1919.581061] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Copying Virtual Disk [datastore2] vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] 
vmware_temp/13774f6a-4f01-432c-aa26-373896757960/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1919.581348] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a787ac21-519d-4657-9fda-9931c19c4ef1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.589343] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Waiting for the task: (returnval){ [ 1919.589343] env[69227]: value = "task-3475189" [ 1919.589343] env[69227]: _type = "Task" [ 1919.589343] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.597031] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Task: {'id': task-3475189, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.101276] env[69227]: DEBUG oslo_vmware.exceptions [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1920.101745] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1920.102477] env[69227]: ERROR nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1920.102477] env[69227]: Faults: ['InvalidArgument'] [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Traceback (most recent call last): [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] yield resources [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self.driver.spawn(context, instance, image_meta, [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1920.102477] env[69227]: ERROR 
nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self._fetch_image_if_missing(context, vi) [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] image_cache(vi, tmp_image_ds_loc) [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] vm_util.copy_virtual_disk( [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] session._wait_for_task(vmdk_copy_task) [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] return self.wait_for_task(task_ref) [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] return evt.wait() [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] result = hub.switch() [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] return self.greenlet.switch() [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self.f(*self.args, **self.kw) [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] raise exceptions.translate_fault(task_info.error) [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Faults: ['InvalidArgument'] [ 1920.102477] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] [ 1920.104278] env[69227]: INFO nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Terminating instance [ 1920.104522] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1920.104733] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1920.105376] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1920.105541] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1920.105773] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5561e347-f178-453d-ac60-d58953b0ee5a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.108039] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a733548-ccac-44b6-9b8c-c5e2066093a2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.114910] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1920.116044] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bdf45ae-bda4-42bb-ba29-54ab0ff63294 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.117245] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1920.117417] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1920.118362] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eab5053e-5933-4b23-84de-dc3e20cf67ab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.123325] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Waiting for the task: (returnval){ [ 1920.123325] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b036b5-3f7d-698a-6362-04e83d09fc22" [ 1920.123325] env[69227]: _type = "Task" [ 1920.123325] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.132434] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52b036b5-3f7d-698a-6362-04e83d09fc22, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.187744] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1920.188017] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1920.188225] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Deleting the datastore file [datastore2] 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1920.188515] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d79b8ac8-8c21-448f-8646-3e16a404056d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.194559] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Waiting for the task: (returnval){ [ 1920.194559] env[69227]: value = "task-3475191" [ 1920.194559] env[69227]: _type = "Task" [ 1920.194559] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.202021] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Task: {'id': task-3475191, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.633895] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1920.634167] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Creating directory with path [datastore2] vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1920.634410] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9d8aca4-d19c-4ccb-98c4-f57e582d06b8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.645113] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Created directory with path [datastore2] vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1920.645312] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Fetch image to [datastore2] vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1920.645532] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1920.646303] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df795bf0-41e1-427b-975c-62b31a719487 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.652471] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b11c039-b804-45de-a56f-71450c9192d3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.661108] env[69227]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5948a012-48ee-4f16-8713-40f2d21c2143 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.690587] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe1f2a98-e112-43e6-9e00-26507264ad12 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.698050] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c8ca0822-95f3-492f-95bf-8d23c7c4bf22 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.704472] env[69227]: DEBUG oslo_vmware.api [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Task: {'id': task-3475191, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071432} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.704714] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1920.704892] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1920.705072] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1920.705246] env[69227]: INFO nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Took 0.60 seconds to destroy the instance on the hypervisor. 
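The entries above trace the vCenter task lifecycle on both paths: the compute node submits a task (CopyVirtualDisk_Task, DeleteDatastoreFile_Task), polls it via oslo_vmware.api ("progress is 0%"), and either sees it complete with a duration_secs or, when the task reports an error, the poller raises the translated fault (here VimFaultException: "A specified parameter was not correct: fileType"). Below is a minimal, illustrative poll-until-done sketch of that pattern; it is not the oslo.vmware implementation, and fetch_task_info / TaskFailed are hypothetical placeholders.

import time


class TaskFailed(Exception):
    """Placeholder for a translated task fault (e.g. a VimFaultException)."""


def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out.

    fetch_task_info(task_id) is assumed to return a dict such as
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            # Mirrors the failure path seen above, where the poller raises
            # the fault extracted from the task info.
            raise TaskFailed(info.get('error', 'unknown fault'))
        # Mirrors the periodic "... progress is 0%." debug entries.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")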
[ 1920.708870] env[69227]: DEBUG nova.compute.claims [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1920.709077] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1920.709323] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1920.718635] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1920.855928] env[69227]: DEBUG oslo_vmware.rw_handles [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1920.914884] env[69227]: DEBUG oslo_vmware.rw_handles [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1920.915082] env[69227]: DEBUG oslo_vmware.rw_handles [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1921.348726] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969efbf9-3162-40b0-b834-497cea604557 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.356207] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143b115b-e085-43a9-bc76-84e9fc276e8a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.385617] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cdb91dd-908a-47af-acd0-ae4b8dd9f1f0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.392485] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10a84d2-5765-4d7a-9e34-297078d676de {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.405806] env[69227]: DEBUG nova.compute.provider_tree [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1921.911018] env[69227]: DEBUG nova.scheduler.client.report [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1922.414570] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.705s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1922.415170] env[69227]: ERROR nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1922.415170] env[69227]: Faults: ['InvalidArgument'] [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Traceback (most recent call last): [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1922.415170] 
env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self.driver.spawn(context, instance, image_meta, [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self._fetch_image_if_missing(context, vi) [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] image_cache(vi, tmp_image_ds_loc) [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] vm_util.copy_virtual_disk( [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] session._wait_for_task(vmdk_copy_task) [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] return self.wait_for_task(task_ref) [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] return evt.wait() [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] result = hub.switch() [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] return self.greenlet.switch() [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] self.f(*self.args, **self.kw) [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] raise exceptions.translate_fault(task_info.error) [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Faults: ['InvalidArgument'] [ 1922.415170] env[69227]: ERROR nova.compute.manager [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] [ 1922.415913] env[69227]: DEBUG nova.compute.utils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1922.417724] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Build of instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 was re-scheduled: A specified parameter was not correct: fileType [ 1922.417724] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1922.418095] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1922.418287] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1922.418446] env[69227]: DEBUG nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1922.418606] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1923.142060] env[69227]: DEBUG nova.network.neutron [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.645099] env[69227]: INFO nova.compute.manager [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Took 1.23 seconds to deallocate network for instance. [ 1924.675786] env[69227]: INFO nova.scheduler.client.report [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Deleted allocations for instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 [ 1925.183921] env[69227]: DEBUG oslo_concurrency.lockutils [None req-dd9d9910-9f18-4bf0-a3e0-f033848856cc tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 569.596s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1925.185254] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 373.628s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1925.185483] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Acquiring lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1925.185689] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1925.185882] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1925.189768] env[69227]: INFO nova.compute.manager [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Terminating instance [ 1925.191560] env[69227]: DEBUG nova.compute.manager [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1925.191759] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1925.192034] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf009639-c86a-47be-baee-d65cb98aa993 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.201047] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4684105-f3ca-4d7f-a22d-70f99ef02636 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.231723] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ccb6955-9796-4f7f-bc22-a3e9563d3f43 could not be found. [ 1925.231934] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1925.232121] env[69227]: INFO nova.compute.manager [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1925.232370] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1925.232608] env[69227]: DEBUG nova.compute.manager [-] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1925.232700] env[69227]: DEBUG nova.network.neutron [-] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1925.688633] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1925.752920] env[69227]: DEBUG nova.network.neutron [-] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.206680] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1926.206952] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1926.208499] env[69227]: INFO nova.compute.claims [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1926.255528] env[69227]: INFO nova.compute.manager [-] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] Took 1.02 seconds to deallocate network for instance. 
[ 1927.276935] env[69227]: DEBUG oslo_concurrency.lockutils [None req-7c3eccd1-24c2-4278-9808-a1c532380f92 tempest-ServerActionsTestOtherA-2092946665 tempest-ServerActionsTestOtherA-2092946665-project-member] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.092s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1927.278499] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 138.658s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1927.278713] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ccb6955-9796-4f7f-bc22-a3e9563d3f43] During sync_power_state the instance has a pending task (deleting). Skip. [ 1927.278890] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "8ccb6955-9796-4f7f-bc22-a3e9563d3f43" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1927.349976] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82956880-150f-4769-8080-30cff595cb22 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.357541] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9cf578-2ae7-4988-9ae8-ea2cb371c199 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.386984] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41109a84-d27c-42a2-b801-0595a51325aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.393547] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02f61bdc-9746-4a1a-b46a-cf67efba7639 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.405994] env[69227]: DEBUG nova.compute.provider_tree [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.909129] env[69227]: DEBUG nova.scheduler.client.report [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1928.413635] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1928.414169] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1928.918892] env[69227]: DEBUG nova.compute.utils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1928.920376] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1928.920551] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1928.956958] env[69227]: DEBUG nova.policy [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e1869fbfffc40e0a5928efbdf5f2a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00727f8cf5ba447f88abe6ab2c951aa7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1929.223513] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Successfully created port: 354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1929.423760] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Start building block device mappings for instance. 
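The inventory dictionary reported just above for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b is what Placement uses to size the resource provider: usable capacity per resource class follows the standard Placement formula (total - reserved) * allocation_ratio, consumed at most max_unit at a time. A quick check of those numbers under that formula (this is the generic arithmetic, not code from the report client):

```python
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0, "max_unit": 93},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}, at most {inv['max_unit']} per allocation")

# VCPU: capacity 192, at most 16 per allocation
# MEMORY_MB: capacity 196078, at most 65530 per allocation
# DISK_GB: capacity 400, at most 93 per allocation
```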
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1930.434375] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1930.459406] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1930.459661] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1930.459815] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1930.459993] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1930.460186] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1930.460377] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1930.460628] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1930.460791] env[69227]: DEBUG 
nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1930.460956] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1930.461140] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1930.461318] env[69227]: DEBUG nova.virt.hardware [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1930.462334] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28777625-eb70-4a09-bdc8-3d6041a44061 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.470673] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5c94e4-7b2b-49bf-9160-f7f1c93b283f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.649750] env[69227]: DEBUG nova.compute.manager [req-c143734f-0fa5-4956-98e9-7308db496d4b req-4343f353-0d3a-450f-baff-060bd29a61f5 service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Received event network-vif-plugged-354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1930.649993] env[69227]: DEBUG oslo_concurrency.lockutils [req-c143734f-0fa5-4956-98e9-7308db496d4b req-4343f353-0d3a-450f-baff-060bd29a61f5 service nova] Acquiring lock "5539b326-2f24-45b7-874a-edc484e82267-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1930.650266] env[69227]: DEBUG oslo_concurrency.lockutils [req-c143734f-0fa5-4956-98e9-7308db496d4b req-4343f353-0d3a-450f-baff-060bd29a61f5 service nova] Lock "5539b326-2f24-45b7-874a-edc484e82267-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1930.650442] env[69227]: DEBUG oslo_concurrency.lockutils [req-c143734f-0fa5-4956-98e9-7308db496d4b req-4343f353-0d3a-450f-baff-060bd29a61f5 service nova] Lock "5539b326-2f24-45b7-874a-edc484e82267-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1930.650807] env[69227]: DEBUG nova.compute.manager [req-c143734f-0fa5-4956-98e9-7308db496d4b req-4343f353-0d3a-450f-baff-060bd29a61f5 
service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] No waiting events found dispatching network-vif-plugged-354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1930.651025] env[69227]: WARNING nova.compute.manager [req-c143734f-0fa5-4956-98e9-7308db496d4b req-4343f353-0d3a-450f-baff-060bd29a61f5 service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Received unexpected event network-vif-plugged-354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 for instance with vm_state building and task_state spawning. [ 1930.730184] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Successfully updated port: 354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1931.233543] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "refresh_cache-5539b326-2f24-45b7-874a-edc484e82267" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.233704] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired lock "refresh_cache-5539b326-2f24-45b7-874a-edc484e82267" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1931.233990] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1931.763084] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1931.882427] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Updating instance_info_cache with network_info: [{"id": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57", "address": "fa:16:3e:83:f4:c4", "network": {"id": "04086ef4-95f1-4c4e-8b59-4954ddff44a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715257717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00727f8cf5ba447f88abe6ab2c951aa7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap354e0f02-b1", "ovs_interfaceid": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1932.385032] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Releasing lock "refresh_cache-5539b326-2f24-45b7-874a-edc484e82267" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1932.385382] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance network_info: |[{"id": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57", "address": "fa:16:3e:83:f4:c4", "network": {"id": "04086ef4-95f1-4c4e-8b59-4954ddff44a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715257717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00727f8cf5ba447f88abe6ab2c951aa7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap354e0f02-b1", "ovs_interfaceid": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1932.385825] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:f4:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6365036-aa37-44d2-90d1-ca1c3516ded9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '354e0f02-b1ac-4b95-8933-b4c7d7fb5a57', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1932.393226] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Creating folder: Project (00727f8cf5ba447f88abe6ab2c951aa7). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1932.393513] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7aded77-3394-4c48-9546-967969393b3c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.406352] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Created folder: Project (00727f8cf5ba447f88abe6ab2c951aa7) in parent group-v694623. [ 1932.406534] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Creating folder: Instances. Parent ref: group-v694740. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1932.406746] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9c325882-d8c5-462a-ad8f-ad017b8edf88 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.416381] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Created folder: Instances in parent group-v694740. [ 1932.416615] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
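The "Instance VIF info" entry above is a projection of the Neutron network_info cache shown a few lines earlier: the port id becomes iface_id, the MAC address carries over, and for an NSX opaque network the nsx-logical-switch-id is used as the network reference. A hedged sketch of that mapping over a pared-down copy of the cached entry; the field names follow the log, but the helper itself is illustrative.

```python
# Pared-down copy of the cached network_info entry from the log above.
network_info = [{
    "id": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57",
    "address": "fa:16:3e:83:f4:c4",
    "details": {"nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9"},
    "vnic_type": "normal",
}]


def to_vif_info(vif, vif_model="vmxnet3"):
    """Illustrative mapping from a cached VIF entry to driver-side VIF info."""
    return {
        "network_name": "br-int",
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


print([to_vif_info(vif) for vif in network_info])
```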
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1932.416793] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1932.416981] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ca6d5ce-fd08-4ec9-8215-9e6e43a1c109 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.435823] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1932.435823] env[69227]: value = "task-3475194" [ 1932.435823] env[69227]: _type = "Task" [ 1932.435823] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.445806] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475194, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.677813] env[69227]: DEBUG nova.compute.manager [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Received event network-changed-354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1932.678029] env[69227]: DEBUG nova.compute.manager [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Refreshing instance network info cache due to event network-changed-354e0f02-b1ac-4b95-8933-b4c7d7fb5a57. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1932.678261] env[69227]: DEBUG oslo_concurrency.lockutils [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] Acquiring lock "refresh_cache-5539b326-2f24-45b7-874a-edc484e82267" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.678395] env[69227]: DEBUG oslo_concurrency.lockutils [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] Acquired lock "refresh_cache-5539b326-2f24-45b7-874a-edc484e82267" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1932.678558] env[69227]: DEBUG nova.network.neutron [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Refreshing network info cache for port 354e0f02-b1ac-4b95-8933-b4c7d7fb5a57 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1932.947566] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475194, 'name': CreateVM_Task, 'duration_secs': 0.273755} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.947912] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1932.948479] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.948726] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1932.949206] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1932.949544] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f35a11a4-2ccd-4951-bdfa-2bc200823e3e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.954044] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for the task: (returnval){ [ 1932.954044] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52fc2546-5003-d60a-09b9-d57c0d87e1e3" [ 1932.954044] env[69227]: _type = "Task" [ 1932.954044] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.964357] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52fc2546-5003-d60a-09b9-d57c0d87e1e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.353188] env[69227]: DEBUG nova.network.neutron [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Updated VIF entry in instance network info cache for port 354e0f02-b1ac-4b95-8933-b4c7d7fb5a57. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1933.353562] env[69227]: DEBUG nova.network.neutron [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Updating instance_info_cache with network_info: [{"id": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57", "address": "fa:16:3e:83:f4:c4", "network": {"id": "04086ef4-95f1-4c4e-8b59-4954ddff44a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715257717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00727f8cf5ba447f88abe6ab2c951aa7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap354e0f02-b1", "ovs_interfaceid": "354e0f02-b1ac-4b95-8933-b4c7d7fb5a57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.464906] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1933.465178] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1933.465403] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.858883] env[69227]: DEBUG oslo_concurrency.lockutils [req-4d22c7e5-d7eb-40e2-a8d6-72c4091482ae req-30cbc15a-ea47-4fb1-9a1c-278f1fce6a2d service nova] Releasing lock "refresh_cache-5539b326-2f24-45b7-874a-edc484e82267" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1941.684762] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "0b31dc0b-6a70-41aa-adbe-d989a002adca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1941.685059] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "0b31dc0b-6a70-41aa-adbe-d989a002adca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1945.427413] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1947.172107] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "5539b326-2f24-45b7-874a-edc484e82267" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1948.427779] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.427300] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1949.427486] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 1951.427143] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.427103] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.427340] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 1952.427576] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 1952.932694] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.932842] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.932975] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933117] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933240] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933362] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933480] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933596] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933711] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933823] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 1952.933939] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
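The periodic _heal_instance_info_cache pass above rebuilds the list of instances, skips every instance still in the Building state, and ends with "Didn't find any instances for network info cache update." The selection amounts to a simple state filter; a rough sketch follows, with the vm_state values and helper name chosen for illustration rather than copied from Nova.

```python
instances = [
    {"uuid": "1b975f6d-7e12-44cd-99c4-c480edc286bd", "vm_state": "building"},
    {"uuid": "5539b326-2f24-45b7-874a-edc484e82267", "vm_state": "building"},
]


def instances_to_heal(instances):
    """Yield instances whose network info cache is worth refreshing."""
    for inst in instances:
        if inst["vm_state"] == "building":
            print(f"[instance: {inst['uuid']}] Skipping network cache update "
                  "for instance because it is Building.")
            continue
        yield inst


if not list(instances_to_heal(instances)):
    print("Didn't find any instances for network info cache update.")
```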
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 1952.934174] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.934338] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1952.934489] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1953.437765] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1953.438098] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1953.438273] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1953.438447] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1953.439357] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-167acc2d-d75e-4e43-b3c6-a4cb0b16655b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.447768] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13c2fa46-9da9-47ff-af28-d07918f8e26f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.462744] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff556997-0bc4-4cfb-918c-e6d4f7475cf9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.469023] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78485410-e930-4775-8113-f4eb68b3a29c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.497435] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180905MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1953.497589] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1953.497809] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1954.528279] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.528515] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.528574] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.528692] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.528811] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.528928] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.529055] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.529173] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.529336] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1954.529461] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1955.032407] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1955.535441] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1956.038534] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1956.038839] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1956.039033] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1956.187781] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad203422-19c3-4e59-b704-d98a484b3393 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.195078] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597b320b-cc6a-41ea-bb02-7a45bb75f233 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.225981] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a3b810-f1a2-4126-8857-00ff8ed3f85e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.232852] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51305cd6-9e26-4c11-b64b-885dcc5ee672 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.245436] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1956.748934] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1957.255791] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1957.256071] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.758s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1958.251547] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1967.011800] env[69227]: WARNING oslo_vmware.rw_handles [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1967.011800] env[69227]: ERROR oslo_vmware.rw_handles [ 1967.012580] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1967.014300] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1967.014536] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Copying Virtual Disk [datastore2] vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/ac7a88de-6b8d-49ab-b7be-c552d63d4a30/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1967.014812] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-04e3a25c-490d-4faa-9abc-713fe41188a6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.022505] env[69227]: DEBUG oslo_vmware.api 
[None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Waiting for the task: (returnval){ [ 1967.022505] env[69227]: value = "task-3475195" [ 1967.022505] env[69227]: _type = "Task" [ 1967.022505] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.030037] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Task: {'id': task-3475195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.533218] env[69227]: DEBUG oslo_vmware.exceptions [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1967.533488] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1967.534053] env[69227]: ERROR nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.534053] env[69227]: Faults: ['InvalidArgument'] [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Traceback (most recent call last): [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] yield resources [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self.driver.spawn(context, instance, image_meta, [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self._fetch_image_if_missing(context, vi) [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 
1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] image_cache(vi, tmp_image_ds_loc) [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] vm_util.copy_virtual_disk( [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] session._wait_for_task(vmdk_copy_task) [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] return self.wait_for_task(task_ref) [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] return evt.wait() [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] result = hub.switch() [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] return self.greenlet.switch() [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self.f(*self.args, **self.kw) [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] raise exceptions.translate_fault(task_info.error) [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Faults: ['InvalidArgument'] [ 1967.534053] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] [ 1967.535016] env[69227]: INFO nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 
tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Terminating instance [ 1967.535890] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1967.536106] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1967.536349] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-512f557f-6a36-4439-8342-cbb6f917a3d4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.538526] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1967.538717] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1967.539472] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6e21bc-b93b-451d-869c-7054cfc489db {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.546474] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1967.546717] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40b73b28-dca6-4cf8-9053-75453e49e61f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.548878] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1967.549069] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1967.550030] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f56c9ee-745e-4fd4-b03a-738bebbf2ea5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.554273] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Waiting for the task: (returnval){ [ 1967.554273] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d7fc25-093f-441e-66ea-9aabdefe9e11" [ 1967.554273] env[69227]: _type = "Task" [ 1967.554273] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.561098] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d7fc25-093f-441e-66ea-9aabdefe9e11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.616901] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1967.617148] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1967.617347] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Deleting the datastore file [datastore2] 1b975f6d-7e12-44cd-99c4-c480edc286bd {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1967.617619] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4b60d063-6a92-4445-802d-4d1fec82c576 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.626948] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Waiting for the task: (returnval){ [ 1967.626948] env[69227]: value = "task-3475197" [ 1967.626948] env[69227]: _type = "Task" [ 1967.626948] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1967.635235] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Task: {'id': task-3475197, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.065066] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1968.065523] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Creating directory with path [datastore2] vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1968.065523] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be45cac2-9ec7-49f4-b141-8cf03a9eb819 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.077060] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Created directory with path [datastore2] vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1968.077289] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Fetch image to [datastore2] vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1968.077533] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1968.078303] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8db98d-53dc-4463-8a1a-70b6f61054d4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.084555] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97698c3e-01ab-48d0-b558-7e4da93e0acb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.093361] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d9d3fb-685c-4add-9a71-ae66988a484a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.122972] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85cb4945-2036-471b-ad96-d2f5bef9325f {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.130531] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-95620cc1-79fc-447e-8c02-f5e95337fd90 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.136358] env[69227]: DEBUG oslo_vmware.api [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Task: {'id': task-3475197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076666} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.136619] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1968.136835] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1968.137057] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1968.137273] env[69227]: INFO nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Took 0.60 seconds to destroy the instance on the hypervisor. 
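
Every vSphere operation in the records above (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task) follows the same oslo.vmware pattern: submit the task, then poll it until it reports success or an error, at which point the fault is translated and raised — which is exactly how the "A specified parameter was not correct: fileType" failure surfaced in the earlier traceback. Below is a minimal, self-contained sketch of that polling loop; TaskInfo, VimTaskError and the get_task_info callback are illustrative stand-ins, not the real oslo.vmware or vSphere objects.

    # Sketch of the task-polling pattern traced in the log
    # ("Waiting for the task ... progress is N% ... completed successfully",
    #  or the fault being raised from _poll_task on error).
    import time
    from dataclasses import dataclass


    @dataclass
    class TaskInfo:
        state: str              # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: str | None = None


    class VimTaskError(Exception):
        """Raised when the polled task finishes in the 'error' state."""


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a task until it completes, mirroring the log's _poll_task loop."""
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                # oslo.vmware translates the fault here (e.g. InvalidArgument).
                raise VimTaskError(info.error)
            print(f"progress is {info.progress}%")   # still queued/running
            time.sleep(poll_interval)


    if __name__ == "__main__":
        # Fake task that runs for two polls and then fails like task-3475195.
        states = iter([
            TaskInfo("running", 0),
            TaskInfo("running", 40),
            TaskInfo("error", error="A specified parameter was not correct: fileType"),
        ])
        try:
            wait_for_task(lambda: next(states), poll_interval=0.01)
        except VimTaskError as exc:
            print(f"task failed: {exc}")
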
[ 1968.139340] env[69227]: DEBUG nova.compute.claims [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 1968.139528] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1968.139764] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1968.149283] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1968.199894] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1968.259940] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1968.260176] env[69227]: DEBUG oslo_vmware.rw_handles [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1968.783173] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99047ff-1ff7-4507-be31-4d60886dcc85 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.790942] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098f99cc-0c80-47a7-9eda-8d4430133cb1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.821735] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be772ba4-06bc-4dc6-86a7-d07bec3423a9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.828795] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21c51ce-8b82-4dfb-83c6-98545ba74814 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.841947] env[69227]: DEBUG nova.compute.provider_tree [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1969.345257] env[69227]: DEBUG nova.scheduler.client.report [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1969.850640] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.711s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1969.851193] env[69227]: ERROR nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1969.851193] env[69227]: Faults: ['InvalidArgument'] [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Traceback (most recent call last): [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 1969.851193] 
env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self.driver.spawn(context, instance, image_meta, [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self._fetch_image_if_missing(context, vi) [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] image_cache(vi, tmp_image_ds_loc) [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] vm_util.copy_virtual_disk( [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] session._wait_for_task(vmdk_copy_task) [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] return self.wait_for_task(task_ref) [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] return evt.wait() [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] result = hub.switch() [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] return self.greenlet.switch() [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] self.f(*self.args, **self.kw) [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] raise exceptions.translate_fault(task_info.error) [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Faults: ['InvalidArgument'] [ 1969.851193] env[69227]: ERROR nova.compute.manager [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] [ 1969.852032] env[69227]: DEBUG nova.compute.utils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1969.853641] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Build of instance 1b975f6d-7e12-44cd-99c4-c480edc286bd was re-scheduled: A specified parameter was not correct: fileType [ 1969.853641] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 1969.854000] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 1969.854190] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 1969.854360] env[69227]: DEBUG nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1969.854526] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1970.842277] env[69227]: DEBUG nova.network.neutron [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1971.345283] env[69227]: INFO nova.compute.manager [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Took 1.49 seconds to deallocate network for instance. [ 1972.376513] env[69227]: INFO nova.scheduler.client.report [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Deleted allocations for instance 1b975f6d-7e12-44cd-99c4-c480edc286bd [ 1972.884420] env[69227]: DEBUG oslo_concurrency.lockutils [None req-46085d5c-8058-4ab6-a51a-0c93967fba53 tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 539.349s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1972.885724] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 344.029s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1972.885953] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Acquiring lock "1b975f6d-7e12-44cd-99c4-c480edc286bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1972.886185] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1972.886353] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1972.889323] env[69227]: INFO nova.compute.manager [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Terminating instance [ 1972.891053] env[69227]: DEBUG nova.compute.manager [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1972.891255] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1972.891709] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a6a16559-90a2-45f1-9a08-4dd74828be40 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.901152] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5478fa4-f7ed-4fcb-9384-96fa24410fe2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.930964] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1b975f6d-7e12-44cd-99c4-c480edc286bd could not be found. [ 1972.931209] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1972.931391] env[69227]: INFO nova.compute.manager [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1972.931645] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1972.931907] env[69227]: DEBUG nova.compute.manager [-] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1972.931987] env[69227]: DEBUG nova.network.neutron [-] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1973.392958] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1973.450386] env[69227]: DEBUG nova.network.neutron [-] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.915968] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1973.915968] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1973.916812] env[69227]: INFO nova.compute.claims [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1973.953208] env[69227]: INFO nova.compute.manager [-] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] Took 1.02 seconds to deallocate network for instance. 
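
The resource-tracker records around the claim above all share one shape: acquire the "compute_resources" lock, perform the claim (or abort it), release the lock, and log how long it was waited on and held. A small sketch of that pattern using the oslo.concurrency lock context manager; the inventory and claim bookkeeping here are made-up placeholders, and only lockutils.lock() itself is the real API.

    # Sketch of the lock/claim bookkeeping seen in the "compute_resources"
    # records (acquire -> claim -> release, with waited/held timings logged).
    import logging
    import time

    from oslo_concurrency import lockutils

    LOG = logging.getLogger("resource-tracker-sketch")

    _inventory = {"VCPU": 48, "MEMORY_MB": 196590, "DISK_GB": 400}
    _claimed = {"VCPU": 0, "MEMORY_MB": 512, "DISK_GB": 0}


    def instance_claim(flavor):
        """Claim flavor resources while holding the compute_resources lock."""
        t_wait = time.monotonic()
        with lockutils.lock("compute_resources"):
            t_held = time.monotonic()
            LOG.debug('Lock "compute_resources" acquired :: waited %.3fs',
                      t_held - t_wait)
            for rc, amount in flavor.items():
                if _claimed[rc] + amount > _inventory[rc]:
                    raise RuntimeError(f"insufficient {rc} for claim")
                _claimed[rc] += amount
        LOG.debug('Lock "compute_resources" released :: held %.3fs',
                  time.monotonic() - t_held)


    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        instance_claim({"VCPU": 1, "MEMORY_MB": 192, "DISK_GB": 1})   # m1.nano-like
        print("claimed:", _claimed)
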
[ 1974.974537] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6ac06456-a1fa-4d3d-989e-c5eb31481c0e tempest-ServerDiskConfigTestJSON-1952933683 tempest-ServerDiskConfigTestJSON-1952933683-project-member] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.089s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1974.975448] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 186.355s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1974.975643] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1b975f6d-7e12-44cd-99c4-c480edc286bd] During sync_power_state the instance has a pending task (deleting). Skip. [ 1974.975812] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1b975f6d-7e12-44cd-99c4-c480edc286bd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1975.058754] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18771e63-6922-4c16-af28-ad8207ee5a8f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.066432] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0cbd94-92c3-4dbf-a578-9813b9a6d50e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.096747] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162a6e98-50ee-4f20-9c91-3d44cc7bdcc5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.103933] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af2263b-ed3c-4cb5-a0d8-b605c75bdb4a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.116744] env[69227]: DEBUG nova.compute.provider_tree [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1975.620167] env[69227]: DEBUG nova.scheduler.client.report [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1976.125489] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.210s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1976.125997] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1976.631294] env[69227]: DEBUG nova.compute.utils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1976.632668] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1976.632845] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1976.667693] env[69227]: DEBUG nova.policy [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1f11c04b45e54613ae31393b0aaf0c4f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f0d2e21208b4b11928219d5972e8b0c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 1976.943900] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Successfully created port: d1369906-26c9-4208-a0a3-68666bf5f09d {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1977.135924] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1978.146234] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1978.171575] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1978.171811] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1978.171970] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1978.172278] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1978.172448] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1978.172599] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1978.172822] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1978.172973] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1978.173154] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1978.173316] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1978.173489] env[69227]: DEBUG nova.virt.hardware [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1978.174374] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058d9a50-65cb-451a-b9be-dde90b5734ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.182644] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607cc8de-bf62-4110-a7a2-e93a0fb13d59 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.298479] env[69227]: DEBUG nova.compute.manager [req-a5c045f3-c0b3-4952-8534-0a50c0901a54 req-e0ebcfec-6784-47b2-bdf8-4e94858b11e9 service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Received event network-vif-plugged-d1369906-26c9-4208-a0a3-68666bf5f09d {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1978.298712] env[69227]: DEBUG oslo_concurrency.lockutils [req-a5c045f3-c0b3-4952-8534-0a50c0901a54 req-e0ebcfec-6784-47b2-bdf8-4e94858b11e9 service nova] Acquiring lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1978.298888] env[69227]: DEBUG oslo_concurrency.lockutils [req-a5c045f3-c0b3-4952-8534-0a50c0901a54 req-e0ebcfec-6784-47b2-bdf8-4e94858b11e9 service nova] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1978.299072] env[69227]: DEBUG oslo_concurrency.lockutils [req-a5c045f3-c0b3-4952-8534-0a50c0901a54 req-e0ebcfec-6784-47b2-bdf8-4e94858b11e9 service nova] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1978.299220] env[69227]: DEBUG 
nova.compute.manager [req-a5c045f3-c0b3-4952-8534-0a50c0901a54 req-e0ebcfec-6784-47b2-bdf8-4e94858b11e9 service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] No waiting events found dispatching network-vif-plugged-d1369906-26c9-4208-a0a3-68666bf5f09d {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1978.299377] env[69227]: WARNING nova.compute.manager [req-a5c045f3-c0b3-4952-8534-0a50c0901a54 req-e0ebcfec-6784-47b2-bdf8-4e94858b11e9 service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Received unexpected event network-vif-plugged-d1369906-26c9-4208-a0a3-68666bf5f09d for instance with vm_state building and task_state spawning. [ 1978.378749] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Successfully updated port: d1369906-26c9-4208-a0a3-68666bf5f09d {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1978.880957] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "refresh_cache-f0394b5e-1437-4e73-9177-0d3f9b1a16ae" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1978.881118] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquired lock "refresh_cache-f0394b5e-1437-4e73-9177-0d3f9b1a16ae" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1978.881268] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1979.411063] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1979.524133] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Updating instance_info_cache with network_info: [{"id": "d1369906-26c9-4208-a0a3-68666bf5f09d", "address": "fa:16:3e:94:e5:9c", "network": {"id": "b0e21eb1-e26e-4fb7-a06d-ed39247589ff", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1676427415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0d2e21208b4b11928219d5972e8b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1369906-26", "ovs_interfaceid": "d1369906-26c9-4208-a0a3-68666bf5f09d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.026651] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Releasing lock "refresh_cache-f0394b5e-1437-4e73-9177-0d3f9b1a16ae" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1980.026990] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance network_info: |[{"id": "d1369906-26c9-4208-a0a3-68666bf5f09d", "address": "fa:16:3e:94:e5:9c", "network": {"id": "b0e21eb1-e26e-4fb7-a06d-ed39247589ff", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1676427415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0d2e21208b4b11928219d5972e8b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1369906-26", "ovs_interfaceid": "d1369906-26c9-4208-a0a3-68666bf5f09d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1980.027438] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:e5:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b107fab-ee71-47db-ad4d-3c6f05546843', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1369906-26c9-4208-a0a3-68666bf5f09d', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1980.034730] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Creating folder: Project (1f0d2e21208b4b11928219d5972e8b0c). Parent ref: group-v694623. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1980.034987] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12e36261-2d00-48eb-afcc-d43bca828424 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.046207] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Created folder: Project (1f0d2e21208b4b11928219d5972e8b0c) in parent group-v694623. [ 1980.046382] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Creating folder: Instances. Parent ref: group-v694743. {{(pid=69227) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1980.046585] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e8fa40b-c1c4-46b0-b0c1-c39fd3a1f38c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.056780] env[69227]: INFO nova.virt.vmwareapi.vm_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Created folder: Instances in parent group-v694743. [ 1980.056995] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1980.057176] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1980.057358] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd894549-1e8f-4768-ae2e-99fc66d3e811 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.074768] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1980.074768] env[69227]: value = "task-3475200" [ 1980.074768] env[69227]: _type = "Task" [ 1980.074768] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.081915] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475200, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.324043] env[69227]: DEBUG nova.compute.manager [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Received event network-changed-d1369906-26c9-4208-a0a3-68666bf5f09d {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 1980.324284] env[69227]: DEBUG nova.compute.manager [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Refreshing instance network info cache due to event network-changed-d1369906-26c9-4208-a0a3-68666bf5f09d. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 1980.324530] env[69227]: DEBUG oslo_concurrency.lockutils [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] Acquiring lock "refresh_cache-f0394b5e-1437-4e73-9177-0d3f9b1a16ae" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.324705] env[69227]: DEBUG oslo_concurrency.lockutils [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] Acquired lock "refresh_cache-f0394b5e-1437-4e73-9177-0d3f9b1a16ae" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1980.324900] env[69227]: DEBUG nova.network.neutron [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Refreshing network info cache for port d1369906-26c9-4208-a0a3-68666bf5f09d {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1980.584640] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475200, 'name': CreateVM_Task, 'duration_secs': 0.299962} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.585114] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1980.585789] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.585955] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1980.586322] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1980.586570] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-538023c5-e3b8-4c23-ba13-233a177bcc93 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.591152] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Waiting for the task: (returnval){ [ 1980.591152] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52219589-9070-b17c-4693-c17cc691ed37" [ 1980.591152] env[69227]: _type = "Task" [ 1980.591152] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.598716] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52219589-9070-b17c-4693-c17cc691ed37, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.006059] env[69227]: DEBUG nova.network.neutron [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Updated VIF entry in instance network info cache for port d1369906-26c9-4208-a0a3-68666bf5f09d. 
{{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1981.006423] env[69227]: DEBUG nova.network.neutron [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Updating instance_info_cache with network_info: [{"id": "d1369906-26c9-4208-a0a3-68666bf5f09d", "address": "fa:16:3e:94:e5:9c", "network": {"id": "b0e21eb1-e26e-4fb7-a06d-ed39247589ff", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1676427415-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0d2e21208b4b11928219d5972e8b0c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b107fab-ee71-47db-ad4d-3c6f05546843", "external-id": "cl2-zone-554", "segmentation_id": 554, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1369906-26", "ovs_interfaceid": "d1369906-26c9-4208-a0a3-68666bf5f09d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.101277] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1981.101526] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1981.101741] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1981.509871] env[69227]: DEBUG oslo_concurrency.lockutils [req-9534068e-e8aa-491d-8d25-ac101cd887d5 req-93b4373c-d214-4cf7-ab00-4011d98307ef service nova] Releasing lock "refresh_cache-f0394b5e-1437-4e73-9177-0d3f9b1a16ae" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2005.427771] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2008.427336] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.428426] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2010.428820] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2012.428634] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2012.429058] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.427242] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.427490] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2013.931028] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2013.931429] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2013.931661] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2013.931900] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2013.932860] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e16573b-887a-41ec-8117-417850beeca0 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.941575] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecc03bfb-3ad0-405b-a748-4b082dccdaff {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.955754] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c49ec78-cecd-45c6-b19d-d137a98bcfc4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.961877] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a32069f-68c3-4188-b3ea-ab342183bb82 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.990033] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180930MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2013.990166] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2013.990389] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2015.034955] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1397d96c-8a1d-4940-9b58-148435f12497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035193] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance dcabb6a4-2b08-47df-8687-18431ee85153 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035272] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035399] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035520] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035627] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035749] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035844] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.035957] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.036080] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2015.539129] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2016.044020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2016.044335] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2016.044335] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2016.179824] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58af0c8-ce2b-4427-9305-4c32b841f8e5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.187240] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d36c8a-24f7-4ce8-b8d0-d59aa05fe796 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.216406] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d77157-ecb2-45f0-b08d-aa8e7c2e4ea1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.223326] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789455da-96a6-4353-9179-dbec362f0625 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.235918] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2016.738655] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2017.024431] env[69227]: WARNING oslo_vmware.rw_handles [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles File 
"/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2017.024431] env[69227]: ERROR oslo_vmware.rw_handles [ 2017.024844] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2017.027167] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2017.027487] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Copying Virtual Disk [datastore2] vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/617975fc-dc7b-46aa-a8b5-48f916061514/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2017.027702] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fca530df-4343-447e-917f-e72e609e89c8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.036450] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Waiting for the task: (returnval){ [ 2017.036450] env[69227]: value = "task-3475201" [ 2017.036450] env[69227]: _type = "Task" [ 2017.036450] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.044534] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Task: {'id': task-3475201, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.243643] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2017.243896] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.253s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2017.547603] env[69227]: DEBUG oslo_vmware.exceptions [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2017.547842] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2017.548405] env[69227]: ERROR nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.548405] env[69227]: Faults: ['InvalidArgument'] [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Traceback (most recent call last): [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] yield resources [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self.driver.spawn(context, instance, image_meta, [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self._fetch_image_if_missing(context, vi) [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] image_cache(vi, tmp_image_ds_loc) [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] vm_util.copy_virtual_disk( [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] session._wait_for_task(vmdk_copy_task) [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] return self.wait_for_task(task_ref) [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] return evt.wait() [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] result = hub.switch() [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] return self.greenlet.switch() [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self.f(*self.args, **self.kw) [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] raise exceptions.translate_fault(task_info.error) [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Faults: ['InvalidArgument'] [ 2017.548405] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] [ 2017.549417] env[69227]: INFO nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Terminating instance [ 2017.550187] env[69227]: DEBUG 
oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2017.550408] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2017.550644] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-deeea844-4e54-4096-b977-83b9de96074d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.552695] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2017.552880] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2017.553575] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1360a9b-250a-46cd-bdbe-430373fc2136 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.560057] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2017.560256] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13101a38-9e5b-4c9d-9a84-95d4d08effb6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.565371] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2017.565540] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2017.566177] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71e31031-495b-42b6-8183-79e32ed237b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.570813] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2017.570813] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52ece6da-a25d-0f17-8e49-bbee63a90cbf" [ 2017.570813] env[69227]: _type = "Task" [ 2017.570813] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.584119] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52ece6da-a25d-0f17-8e49-bbee63a90cbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.653690] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2017.653918] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2017.654152] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Deleting the datastore file [datastore2] 1397d96c-8a1d-4940-9b58-148435f12497 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2017.654432] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-edaebf26-b762-4bb5-8fdd-cd85106d0db8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.661466] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Waiting for the task: (returnval){ [ 2017.661466] env[69227]: value = "task-3475203" [ 2017.661466] env[69227]: _type = "Task" [ 2017.661466] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.669171] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Task: {'id': task-3475203, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.081598] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2018.082033] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating directory with path [datastore2] vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2018.082151] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e74b4cf1-3ff4-4a04-ae9b-d428131d63d4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.137267] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Created directory with path [datastore2] vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2018.137489] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Fetch image to [datastore2] vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2018.137663] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2018.138538] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b997f5-b820-47f4-a1cc-7cd106b48409 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.145544] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbad72e-9cba-49d9-8bca-072655b4aad8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.155626] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5acfa431-effa-4282-8c18-bb2b2ead7546 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.190340] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4153fbb-788a-4591-9737-d63fcc3a1a2d {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.199879] env[69227]: DEBUG oslo_vmware.api [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Task: {'id': task-3475203, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.528138} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.201457] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.201656] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2018.201828] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2018.201995] env[69227]: INFO nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Took 0.65 seconds to destroy the instance on the hypervisor. [ 2018.204127] env[69227]: DEBUG nova.compute.claims [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2018.204303] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2018.204532] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2018.207142] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-dadbe694-9dd0-48bb-ba85-1c6d987bbfea {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.227863] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image 
/opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2018.239367] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.239601] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2018.239752] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2018.239905] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2018.317370] env[69227]: DEBUG oslo_vmware.rw_handles [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2018.377224] env[69227]: DEBUG oslo_vmware.rw_handles [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2018.377421] env[69227]: DEBUG oslo_vmware.rw_handles [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2018.744913] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745084] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745222] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745407] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745545] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745669] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745792] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.745979] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.746069] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.746160] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2018.746280] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2018.868469] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510ea1e4-3693-4355-b6a7-8b847eb08b27 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.875771] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4283cc63-0c29-4f07-a1b4-7a456053b3d7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.905835] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd6842f-a567-498b-ab02-8b0d9ea68f33 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.913218] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f14fb3-4e70-4d6c-a8de-af6990dd1082 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.926231] env[69227]: DEBUG nova.compute.provider_tree [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2019.429423] env[69227]: DEBUG nova.scheduler.client.report [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2019.934267] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.729s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2019.934757] env[69227]: ERROR nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.934757] env[69227]: Faults: ['InvalidArgument'] [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Traceback (most recent call last): [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] 
self.driver.spawn(context, instance, image_meta, [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self._fetch_image_if_missing(context, vi) [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] image_cache(vi, tmp_image_ds_loc) [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] vm_util.copy_virtual_disk( [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] session._wait_for_task(vmdk_copy_task) [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] return self.wait_for_task(task_ref) [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] return evt.wait() [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] result = hub.switch() [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] return self.greenlet.switch() [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] self.f(*self.args, **self.kw) [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task 
[ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] raise exceptions.translate_fault(task_info.error) [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Faults: ['InvalidArgument'] [ 2019.934757] env[69227]: ERROR nova.compute.manager [instance: 1397d96c-8a1d-4940-9b58-148435f12497] [ 2019.935652] env[69227]: DEBUG nova.compute.utils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2019.937184] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Build of instance 1397d96c-8a1d-4940-9b58-148435f12497 was re-scheduled: A specified parameter was not correct: fileType [ 2019.937184] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2019.937557] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2019.937736] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2019.937930] env[69227]: DEBUG nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2019.938120] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2020.778139] env[69227]: DEBUG nova.network.neutron [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2021.281265] env[69227]: INFO nova.compute.manager [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Took 1.34 seconds to deallocate network for instance. 
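The traceback above is the first of two identical failures in this excerpt: CopyVirtualDisk_Task rejects the cached sparse image with "A specified parameter was not correct: fileType" (fault InvalidArgument), the compute manager aborts the claim, and the build is re-scheduled; the same signature reappears later for instance dcabb6a4-2b08-47df-8687-18431ee85153. A minimal triage sketch in Python, assuming a nova-compute log written in exactly this format; the script, its regex, and its output layout are illustrative assumptions, not part of Nova or oslo.vmware:

#!/usr/bin/env python3
# Illustrative helper (not part of Nova): scan a nova-compute log in the
# format shown above and list instances whose build or spawn failed with a
# VimFaultException such as "A specified parameter was not correct: fileType".
import re
import sys
from collections import defaultdict

# Matches lines like:
# [ 2019.934757] env[69227]: ERROR nova.compute.manager [None req-...]
#   [instance: <uuid>] Failed to build and run instance: <Exception>: <message>
FAIL_RE = re.compile(
    r"\[\s*(?P<ts>\d+\.\d+)\].*ERROR nova\.compute\.manager.*"
    r"\[instance: (?P<uuid>[0-9a-f-]{36})\] "
    r"(?:Failed to build and run instance|Instance failed to spawn): "
    r"(?P<exc>[\w.]+Exception): (?P<msg>.*)"
)

def collect_build_failures(path):
    """Return {instance uuid: [(timestamp, exception class, message), ...]}."""
    failures = defaultdict(list)
    with open(path, errors="replace") as fh:
        for line in fh:
            m = FAIL_RE.search(line)
            if m:
                failures[m.group("uuid")].append(
                    (float(m.group("ts")), m.group("exc"), m.group("msg").strip())
                )
    return failures

if __name__ == "__main__":
    for uuid, events in sorted(collect_build_failures(sys.argv[1]).items()):
        for ts, exc, msg in events:
            print(f"{ts:12.3f}  {uuid}  {exc}: {msg}")

Run against this log it would report the 2019.934757 failure for 1397d96c-8a1d-4940-9b58-148435f12497 and the later 2065.124042 spawn failure, which makes the recurring fileType/InvalidArgument pattern easy to spot before digging into the full tracebacks.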
[ 2022.314026] env[69227]: INFO nova.scheduler.client.report [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Deleted allocations for instance 1397d96c-8a1d-4940-9b58-148435f12497 [ 2022.821106] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ea44fffd-fbec-4ef3-8393-57cd03f45cb7 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "1397d96c-8a1d-4940-9b58-148435f12497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.504s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2022.822234] env[69227]: DEBUG oslo_concurrency.lockutils [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "1397d96c-8a1d-4940-9b58-148435f12497" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.209s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2022.822504] env[69227]: DEBUG oslo_concurrency.lockutils [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Acquiring lock "1397d96c-8a1d-4940-9b58-148435f12497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2022.822757] env[69227]: DEBUG oslo_concurrency.lockutils [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "1397d96c-8a1d-4940-9b58-148435f12497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2022.822975] env[69227]: DEBUG oslo_concurrency.lockutils [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "1397d96c-8a1d-4940-9b58-148435f12497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2022.825515] env[69227]: INFO nova.compute.manager [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Terminating instance [ 2022.827128] env[69227]: DEBUG nova.compute.manager [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2022.827496] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2022.827691] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-427bc444-be70-4d90-b739-f54c5df2288e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.836630] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31366692-90f5-4a46-ad31-c8be5ec20413 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.866179] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1397d96c-8a1d-4940-9b58-148435f12497 could not be found. [ 2022.866421] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2022.866640] env[69227]: INFO nova.compute.manager [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2022.866913] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2022.867187] env[69227]: DEBUG nova.compute.manager [-] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2022.867322] env[69227]: DEBUG nova.network.neutron [-] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2023.326159] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Starting instance... 
{{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 2023.384245] env[69227]: DEBUG nova.network.neutron [-] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.850017] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2023.850398] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2023.851982] env[69227]: INFO nova.compute.claims [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2023.886723] env[69227]: INFO nova.compute.manager [-] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] Took 1.02 seconds to deallocate network for instance. [ 2024.911314] env[69227]: DEBUG oslo_concurrency.lockutils [None req-10417651-eb34-4e8c-8c8a-5f1bc04bea20 tempest-ImagesTestJSON-2129746614 tempest-ImagesTestJSON-2129746614-project-member] Lock "1397d96c-8a1d-4940-9b58-148435f12497" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.089s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2024.912534] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1397d96c-8a1d-4940-9b58-148435f12497" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 236.291s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2024.912534] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1397d96c-8a1d-4940-9b58-148435f12497] During sync_power_state the instance has a pending task (deleting). Skip. 
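The successful claim above is made against the provider inventory that the report client logs for 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b (VCPU, MEMORY_MB and DISK_GB, each with a reserved amount and an allocation ratio). Assuming Placement's usual capacity rule, capacity = (total - reserved) * allocation_ratio, a short sketch using the exact inventory dict from this log; the helper function is illustrative only:

# Illustrative only: effective capacity for the inventory reported in this log
# for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, assuming the rule
# capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'min_unit': 1, 'max_unit': 16,
                  'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530,
                  'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'min_unit': 1, 'max_unit': 93,
                  'step_size': 1, 'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Return {resource class: schedulable amount} for one provider."""
    return {
        rc: int((data['total'] - data['reserved']) * data['allocation_ratio'])
        for rc, data in inv.items()
    }

print(effective_capacity(inventory))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}

On those assumptions the provider exposes 192 vCPUs, 196078 MB and 400 GB of schedulable capacity, so the m1.nano flavor requested below (1 vCPU, 192 MB, 1 GB root disk) fits easily, which is consistent with the compute_resources lock being held for only about two seconds during the claim.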
[ 2024.912669] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1397d96c-8a1d-4940-9b58-148435f12497" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2024.986743] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53be2c31-2063-4aa6-9cf8-cb8ffdd7a214 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.994880] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e984fe-3c0c-41b3-a0fc-dfc84657d48a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.024256] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c496f1-a2e0-42df-8aa6-d700d2753a2b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.034125] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ceb438f-9637-45c7-a8ee-38b347b2fb2f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.047995] env[69227]: DEBUG nova.compute.provider_tree [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2025.534574] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2025.550694] env[69227]: DEBUG nova.scheduler.client.report [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2026.055796] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.205s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2026.056362] env[69227]: DEBUG nova.compute.manager [None 
req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 2026.560918] env[69227]: DEBUG nova.compute.utils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2026.562421] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Allocating IP information in the background. {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 2026.562596] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2026.596959] env[69227]: DEBUG nova.policy [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4163297ae024487943a604b9fd2a71f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dd89399a014fbea28c0afc4d6da8f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 2026.856470] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Successfully created port: 29ee7847-9265-4fa8-9a4a-7d6c714a3236 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2027.065333] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Start building block device mappings for instance. 
{{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 2027.345991] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "84d5494a-c08b-45be-a35a-860e64fdf76f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2027.929437] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.074145] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Start spawning the instance on the hypervisor. {{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 2028.099014] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2028.099272] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2028.099431] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.099613] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2028.099759] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.099906] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 
tempest-ServersTestJSON-1746052672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2028.100189] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2028.100361] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2028.100528] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2028.100687] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2028.100857] env[69227]: DEBUG nova.virt.hardware [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2028.101713] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48ab955-70fc-4a91-b5b5-3720ca65d229 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.110277] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4270a459-80b1-4859-bd1a-3ee436f21e5b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.507578] env[69227]: DEBUG nova.compute.manager [req-b8a2305f-8956-4f5a-b6b3-52ad0424ae60 req-2541d5ec-6d10-46d8-8bc3-f3ae60d93524 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Received event network-vif-plugged-29ee7847-9265-4fa8-9a4a-7d6c714a3236 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2028.507880] env[69227]: DEBUG oslo_concurrency.lockutils [req-b8a2305f-8956-4f5a-b6b3-52ad0424ae60 req-2541d5ec-6d10-46d8-8bc3-f3ae60d93524 service nova] Acquiring lock "84d5494a-c08b-45be-a35a-860e64fdf76f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2028.508021] env[69227]: DEBUG oslo_concurrency.lockutils [req-b8a2305f-8956-4f5a-b6b3-52ad0424ae60 req-2541d5ec-6d10-46d8-8bc3-f3ae60d93524 service nova] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2028.508195] env[69227]: DEBUG oslo_concurrency.lockutils [req-b8a2305f-8956-4f5a-b6b3-52ad0424ae60 req-2541d5ec-6d10-46d8-8bc3-f3ae60d93524 service nova] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2028.508362] env[69227]: DEBUG nova.compute.manager [req-b8a2305f-8956-4f5a-b6b3-52ad0424ae60 req-2541d5ec-6d10-46d8-8bc3-f3ae60d93524 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] No waiting events found dispatching network-vif-plugged-29ee7847-9265-4fa8-9a4a-7d6c714a3236 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2028.508525] env[69227]: WARNING nova.compute.manager [req-b8a2305f-8956-4f5a-b6b3-52ad0424ae60 req-2541d5ec-6d10-46d8-8bc3-f3ae60d93524 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Received unexpected event network-vif-plugged-29ee7847-9265-4fa8-9a4a-7d6c714a3236 for instance with vm_state building and task_state spawning. [ 2028.588602] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Successfully updated port: 29ee7847-9265-4fa8-9a4a-7d6c714a3236 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2029.090776] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.091125] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2029.091125] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2029.623341] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2029.746016] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Updating instance_info_cache with network_info: [{"id": "29ee7847-9265-4fa8-9a4a-7d6c714a3236", "address": "fa:16:3e:ca:40:9a", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29ee7847-92", "ovs_interfaceid": "29ee7847-9265-4fa8-9a4a-7d6c714a3236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2030.248968] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2030.249353] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance network_info: |[{"id": "29ee7847-9265-4fa8-9a4a-7d6c714a3236", "address": "fa:16:3e:ca:40:9a", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29ee7847-92", "ovs_interfaceid": "29ee7847-9265-4fa8-9a4a-7d6c714a3236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 2030.249746] env[69227]: 
DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:40:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29ee7847-9265-4fa8-9a4a-7d6c714a3236', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2030.257143] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2030.257327] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2030.257956] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c4ea5ee-2aab-4bb1-84eb-592d028bba19 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.279334] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2030.279334] env[69227]: value = "task-3475204" [ 2030.279334] env[69227]: _type = "Task" [ 2030.279334] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.286772] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475204, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.533043] env[69227]: DEBUG nova.compute.manager [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Received event network-changed-29ee7847-9265-4fa8-9a4a-7d6c714a3236 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2030.533280] env[69227]: DEBUG nova.compute.manager [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Refreshing instance network info cache due to event network-changed-29ee7847-9265-4fa8-9a4a-7d6c714a3236. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 2030.533450] env[69227]: DEBUG oslo_concurrency.lockutils [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] Acquiring lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.533598] env[69227]: DEBUG oslo_concurrency.lockutils [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] Acquired lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2030.533753] env[69227]: DEBUG nova.network.neutron [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Refreshing network info cache for port 29ee7847-9265-4fa8-9a4a-7d6c714a3236 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2030.789802] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475204, 'name': CreateVM_Task, 'duration_secs': 0.29017} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2030.789983] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2030.790674] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.790846] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2030.791182] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 2030.791429] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2117e466-bdc2-460d-a7e7-785292551ded {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2030.795456] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2030.795456] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]522458e1-7e89-443c-8cb3-0ef70019712f" [ 2030.795456] env[69227]: _type = "Task" [ 2030.795456] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2030.802921] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]522458e1-7e89-443c-8cb3-0ef70019712f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.202747] env[69227]: DEBUG nova.network.neutron [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Updated VIF entry in instance network info cache for port 29ee7847-9265-4fa8-9a4a-7d6c714a3236. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2031.203124] env[69227]: DEBUG nova.network.neutron [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Updating instance_info_cache with network_info: [{"id": "29ee7847-9265-4fa8-9a4a-7d6c714a3236", "address": "fa:16:3e:ca:40:9a", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29ee7847-92", "ovs_interfaceid": "29ee7847-9265-4fa8-9a4a-7d6c714a3236", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.304796] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2031.305152] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2031.305275] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2031.706423] env[69227]: DEBUG oslo_concurrency.lockutils [req-d809ff2e-b599-403f-911c-262b97bd61e3 req-ddc956e5-1206-4dfe-8a8c-bb9431f790c3 service nova] Releasing lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2064.602248] env[69227]: WARNING oslo_vmware.rw_handles [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2064.602248] env[69227]: ERROR oslo_vmware.rw_handles [ 2064.602914] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2064.604876] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2064.605156] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Copying Virtual Disk [datastore2] vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/1eedf9cd-351a-4016-938d-db63dfee6d63/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2064.605471] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1cfce7f-3320-4721-8084-9199a89bf62a {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.613332] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2064.613332] env[69227]: value = "task-3475205" [ 2064.613332] env[69227]: _type = "Task" [ 2064.613332] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.620889] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': task-3475205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.123247] env[69227]: DEBUG oslo_vmware.exceptions [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2065.123476] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2065.124042] env[69227]: ERROR nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2065.124042] env[69227]: Faults: ['InvalidArgument'] [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Traceback (most recent call last): [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] yield resources [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self.driver.spawn(context, instance, image_meta, [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self._fetch_image_if_missing(context, vi) [ 2065.124042] env[69227]: ERROR 
nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] image_cache(vi, tmp_image_ds_loc) [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] vm_util.copy_virtual_disk( [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] session._wait_for_task(vmdk_copy_task) [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] return self.wait_for_task(task_ref) [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] return evt.wait() [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] result = hub.switch() [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] return self.greenlet.switch() [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self.f(*self.args, **self.kw) [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] raise exceptions.translate_fault(task_info.error) [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Faults: ['InvalidArgument'] [ 2065.124042] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] [ 2065.125016] env[69227]: INFO nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 
tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Terminating instance [ 2065.125878] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2065.126099] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2065.126331] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc8bf28d-9e49-481b-ac8d-dec60f5b8891 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.129114] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2065.129310] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2065.130014] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ecdbe60-cd02-4a65-965e-8bbf6ec80371 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.136471] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2065.136670] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5fb56c5c-ef6c-40bb-bc43-7c9581da3898 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.138726] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2065.138895] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2065.139832] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a2ec198-4ab0-484d-a1f5-6938cdaf8961 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.145723] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for the task: (returnval){ [ 2065.145723] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f46166-6009-817e-f1c2-2ff5245c5413" [ 2065.145723] env[69227]: _type = "Task" [ 2065.145723] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.152423] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52f46166-6009-817e-f1c2-2ff5245c5413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.213733] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2065.213952] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2065.214149] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Deleting the datastore file [datastore2] dcabb6a4-2b08-47df-8687-18431ee85153 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2065.214410] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1374ee9e-ccb4-42b9-8d85-95e82cb03f61 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.220461] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2065.220461] env[69227]: value = "task-3475207" [ 2065.220461] env[69227]: _type = "Task" [ 2065.220461] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.228144] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': task-3475207, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.656067] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2065.656354] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating directory with path [datastore2] vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2065.656567] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddbdead6-6a24-49e4-9348-4531fe60f797 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.667163] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Created directory with path [datastore2] vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2065.667360] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Fetch image to [datastore2] vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2065.667533] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2065.668255] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784b933b-520c-4901-b34b-0bacf696a1f0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.674942] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e81b2b-0e93-416d-be55-e444b0a205d6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.683830] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afecc7e4-3a3c-46b6-8cd3-f2d7dcd1708b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.714827] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-955cf0b5-36d8-4b62-9760-f3228a98894b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.720038] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-141e8569-b789-4bcc-9f0a-a4714ef518f6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.728774] env[69227]: DEBUG oslo_vmware.api [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': task-3475207, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.07494} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.729011] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2065.729201] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2065.729371] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2065.729544] env[69227]: INFO nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Took 0.60 seconds to destroy the instance on the hypervisor. 
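Both task failures in this run follow the same pattern visible above: a vCenter task (CopyVirtualDisk_Task, DeleteDatastoreFile_Task) is started, polled until it reaches a terminal state, and a task that ends in error is surfaced as a VimFaultException (here "A specified parameter was not correct: fileType", faults ['InvalidArgument']) back in _cache_sparse_image. The sketch below is a minimal, hypothetical illustration of that poll-until-done loop in plain Python; it is not oslo_vmware's wait_for_task implementation, and get_task_info is an assumed callable.

# Minimal sketch of the poll-until-done pattern the log shows (progress polls,
# then success or a raised fault). Not oslo_vmware's actual code; get_task_info
# is a hypothetical callable returning an object with .state, .progress, .error.
import time


class TaskFailed(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException."""


def wait_for_task(get_task_info, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            # the real code translates the VIM fault before raising; here we just wrap it
            raise TaskFailed(info.error)
        # a poll like this produces the "progress is 0%" lines above
        time.sleep(interval)
    raise TimeoutError('task did not reach a terminal state in %ss' % timeout)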
[ 2065.731645] env[69227]: DEBUG nova.compute.claims [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2065.731814] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2065.732064] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2065.743054] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2065.793823] env[69227]: DEBUG oslo_vmware.rw_handles [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2065.853691] env[69227]: DEBUG oslo_vmware.rw_handles [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2065.853876] env[69227]: DEBUG oslo_vmware.rw_handles [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2066.356850] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b12bd7e-615b-4953-958d-b323213e075f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.365122] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fdb91c3-5b97-40bb-aa6e-a5b41a90e821 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.395771] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bd8186-e0bf-42b3-882a-a090bf6572db {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.403090] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be258f6-86db-4832-bbdd-7bbba7b4f29f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.417395] env[69227]: DEBUG nova.compute.provider_tree [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2066.426375] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2066.920177] env[69227]: DEBUG nova.scheduler.client.report [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2067.425584] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.693s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2067.426145] env[69227]: ERROR nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.426145] env[69227]: Faults: ['InvalidArgument'] [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Traceback 
(most recent call last): [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self.driver.spawn(context, instance, image_meta, [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self._fetch_image_if_missing(context, vi) [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] image_cache(vi, tmp_image_ds_loc) [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] vm_util.copy_virtual_disk( [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] session._wait_for_task(vmdk_copy_task) [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] return self.wait_for_task(task_ref) [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] return evt.wait() [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] result = hub.switch() [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] return self.greenlet.switch() [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2067.426145] 
env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] self.f(*self.args, **self.kw) [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] raise exceptions.translate_fault(task_info.error) [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Faults: ['InvalidArgument'] [ 2067.426145] env[69227]: ERROR nova.compute.manager [instance: dcabb6a4-2b08-47df-8687-18431ee85153] [ 2067.427018] env[69227]: DEBUG nova.compute.utils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2067.428749] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Build of instance dcabb6a4-2b08-47df-8687-18431ee85153 was re-scheduled: A specified parameter was not correct: fileType [ 2067.428749] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2067.429134] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2067.429312] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2067.429495] env[69227]: DEBUG nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2067.429657] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2068.192651] env[69227]: DEBUG nova.network.neutron [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.427255] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2068.695885] env[69227]: INFO nova.compute.manager [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Took 1.27 seconds to deallocate network for instance. [ 2069.726316] env[69227]: INFO nova.scheduler.client.report [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Deleted allocations for instance dcabb6a4-2b08-47df-8687-18431ee85153 [ 2070.234658] env[69227]: DEBUG oslo_concurrency.lockutils [None req-75a156fa-68cb-481f-952e-b6543176575a tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "dcabb6a4-2b08-47df-8687-18431ee85153" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.909s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2070.235983] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "dcabb6a4-2b08-47df-8687-18431ee85153" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.509s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2070.236269] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "dcabb6a4-2b08-47df-8687-18431ee85153-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2070.236488] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 
tempest-ServersTestJSON-1746052672-project-member] Lock "dcabb6a4-2b08-47df-8687-18431ee85153-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2070.236661] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "dcabb6a4-2b08-47df-8687-18431ee85153-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2070.240803] env[69227]: INFO nova.compute.manager [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Terminating instance [ 2070.242568] env[69227]: DEBUG nova.compute.manager [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2070.242773] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2070.243050] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b05b0559-6d9b-4380-aec4-550ca55b4d2a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.252583] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa19ba0-bc2d-4fad-95e0-662b24d0e9ad {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.281748] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance dcabb6a4-2b08-47df-8687-18431ee85153 could not be found. [ 2070.281951] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2070.282145] env[69227]: INFO nova.compute.manager [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Took 0.04 seconds to destroy the instance on the hypervisor. 
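Much of the DEBUG noise around the terminate path above is oslo_concurrency lock bookkeeping: a lock named after the instance UUID (plus a second "<uuid>-events" lock) is acquired, the time spent waiting for and holding it is logged, and it is released when the critical section ends. Below is a minimal sketch of that pattern, assuming oslo.concurrency is installed; the print stands in for the "waited/held" log lines and this is not Nova's code.

# Sketch of the per-instance lock pattern logged above, assuming
# oslo.concurrency is available. Illustrative only, not Nova's implementation.
import time

from oslo_concurrency import lockutils


def terminate_instance_locked(instance_uuid, do_terminate):
    wait_start = time.monotonic()
    with lockutils.lock(instance_uuid):          # e.g. "dcabb6a4-...-events"
        waited = time.monotonic() - wait_start
        held_start = time.monotonic()
        try:
            do_terminate()
        finally:
            held = time.monotonic() - held_start
            # mirrors: Lock "<uuid>" "released" :: waited X.XXXs, held Y.YYYs
            print('Lock "%s" released :: waited %.3fs held %.3fs'
                  % (instance_uuid, waited, held))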
[ 2070.282384] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2070.282642] env[69227]: DEBUG nova.compute.manager [-] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2070.282735] env[69227]: DEBUG nova.network.neutron [-] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2070.426997] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2070.427189] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2070.739529] env[69227]: DEBUG nova.compute.manager [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 2070.804023] env[69227]: DEBUG nova.network.neutron [-] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2071.306918] env[69227]: INFO nova.compute.manager [-] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] Took 1.02 seconds to deallocate network for instance. 
[ 2071.327285] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2071.327563] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2071.329130] env[69227]: INFO nova.compute.claims [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2072.332347] env[69227]: DEBUG oslo_concurrency.lockutils [None req-a76852aa-1b2b-4ab4-b460-c144d0f2ef95 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "dcabb6a4-2b08-47df-8687-18431ee85153" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.096s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2072.333675] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "dcabb6a4-2b08-47df-8687-18431ee85153" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 283.712s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2072.333675] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: dcabb6a4-2b08-47df-8687-18431ee85153] During sync_power_state the instance has a pending task (deleting). Skip. 
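The "Claim successful" line above, together with the inventory reported to placement earlier (VCPU: total 48, allocation_ratio 4.0; MEMORY_MB: total 196590, reserved 512; DISK_GB: total 400), reduces to simple capacity arithmetic: effective capacity per resource class is (total - reserved) * allocation_ratio, and a claim fits if current usage plus the request stays under that. The worked sketch below uses the figures from this log; the claim_fits helper is illustrative, not the resource tracker's code.

# Worked capacity check using the inventory figures reported in this log.
# Illustrative only; not nova.compute.resource_tracker.
INVENTORY = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

# m1.nano request as seen later in the log (1 vCPU, 192 MB RAM, 1 GB root disk)
REQUEST = {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}


def claim_fits(inventory, request, used=None):
    used = used or {}
    for rc, amount in request.items():
        inv = inventory[rc]
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        if used.get(rc, 0) + amount > capacity:
            return False
    return True


print(claim_fits(INVENTORY, REQUEST))  # True: 1 <= 192 VCPU, 192 <= 196078 MB, 1 <= 400 GB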
[ 2072.333675] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "dcabb6a4-2b08-47df-8687-18431ee85153" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2072.447109] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d652937-7a16-4ddf-b321-0c7a159ed681 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.454724] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8a71ba-a59c-4e6f-a890-8245ee5c4a5f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.484629] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acad8212-1d3c-4d67-8b8b-a624b999daa1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.491751] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91799ff-0078-40f8-81e1-a87938eb52f0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.504638] env[69227]: DEBUG nova.compute.provider_tree [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2073.007592] env[69227]: DEBUG nova.scheduler.client.report [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2073.427055] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.427381] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.427381] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11243}} [ 2073.512210] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.184s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2073.512768] env[69227]: DEBUG nova.compute.manager [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 2073.931198] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] There are 1 instances to clean {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11252}} [ 2073.931412] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 598e8def-9627-4bd6-860b-50370c98b23b] Instance has had 0 of 5 cleanup attempts {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11256}} [ 2074.017994] env[69227]: DEBUG nova.compute.utils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2074.019306] env[69227]: DEBUG nova.compute.manager [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 2074.019477] env[69227]: DEBUG nova.network.neutron [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2074.065249] env[69227]: DEBUG nova.policy [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bf5b1a2df6a41bbba456c54f29c2ba5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '732948237883495b892ab3b007d7905d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 2074.405046] env[69227]: DEBUG nova.network.neutron [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Successfully created port: fbaa7834-6e02-40b3-862e-ed78348d0e12 {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2074.522509] env[69227]: DEBUG nova.compute.manager [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 2075.434430] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.434627] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2075.434749] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2075.534922] env[69227]: DEBUG nova.compute.manager [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 2075.562193] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2075.562490] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2075.562659] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2075.562864] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2075.563035] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2075.563189] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2075.563396] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2075.563551] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
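The nova.virt.hardware lines around this point pick a guest CPU topology for the m1.nano flavor: with no limits from flavor or image (logged as 0:0:0, i.e. unconstrained, so the maxima default to 65536), every (sockets, cores, threads) split whose product equals the vCPU count is a candidate, and for 1 vCPU that leaves only 1:1:1. A simplified sketch of that enumeration follows; it illustrates the idea and is not Nova's _get_possible_cpu_topologies().

# Simplified sketch of the topology enumeration described by the hardware.py
# lines above: keep every (sockets, cores, threads) split whose product equals
# the vCPU count and that respects the (here unconstrained) maxima.
# Illustrative only; not Nova's actual implementation.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append((sockets, cores, threads))
    return found


print(possible_topologies(1))   # [(1, 1, 1)]  -> "Got 1 possible topologies"
print(possible_topologies(4))   # several splits, e.g. (1, 2, 2), (2, 2, 1), (4, 1, 1)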
[ 2075.563713] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2075.563941] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2075.564154] env[69227]: DEBUG nova.virt.hardware [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2075.565011] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2781df5-84ac-400b-a97c-be858f2cd508 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.573227] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41c53921-c9de-4920-afd4-75ac0bfff2ad {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.762526] env[69227]: DEBUG nova.compute.manager [req-e3b286d7-f1ce-42c4-b72f-e41243d76358 req-3769ec70-0710-4547-a96e-fe547fad6bd0 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Received event network-vif-plugged-fbaa7834-6e02-40b3-862e-ed78348d0e12 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2075.762648] env[69227]: DEBUG oslo_concurrency.lockutils [req-e3b286d7-f1ce-42c4-b72f-e41243d76358 req-3769ec70-0710-4547-a96e-fe547fad6bd0 service nova] Acquiring lock "0b31dc0b-6a70-41aa-adbe-d989a002adca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2075.762914] env[69227]: DEBUG oslo_concurrency.lockutils [req-e3b286d7-f1ce-42c4-b72f-e41243d76358 req-3769ec70-0710-4547-a96e-fe547fad6bd0 service nova] Lock "0b31dc0b-6a70-41aa-adbe-d989a002adca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2075.763109] env[69227]: DEBUG oslo_concurrency.lockutils [req-e3b286d7-f1ce-42c4-b72f-e41243d76358 req-3769ec70-0710-4547-a96e-fe547fad6bd0 service nova] Lock "0b31dc0b-6a70-41aa-adbe-d989a002adca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2075.763306] env[69227]: DEBUG nova.compute.manager [req-e3b286d7-f1ce-42c4-b72f-e41243d76358 req-3769ec70-0710-4547-a96e-fe547fad6bd0 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] No waiting events found dispatching network-vif-plugged-fbaa7834-6e02-40b3-862e-ed78348d0e12 {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2075.763475] 
env[69227]: WARNING nova.compute.manager [req-e3b286d7-f1ce-42c4-b72f-e41243d76358 req-3769ec70-0710-4547-a96e-fe547fad6bd0 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Received unexpected event network-vif-plugged-fbaa7834-6e02-40b3-862e-ed78348d0e12 for instance with vm_state building and task_state spawning. [ 2075.845107] env[69227]: DEBUG nova.network.neutron [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Successfully updated port: fbaa7834-6e02-40b3-862e-ed78348d0e12 {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2075.941186] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.941359] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.941494] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.941620] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.941742] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.941861] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.942166] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.942166] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.942312] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.942347] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2075.942443] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2075.942632] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.942806] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2075.942951] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2076.348097] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "refresh_cache-0b31dc0b-6a70-41aa-adbe-d989a002adca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.348235] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired lock "refresh_cache-0b31dc0b-6a70-41aa-adbe-d989a002adca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2076.348547] env[69227]: DEBUG nova.network.neutron [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2076.446025] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2076.446131] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2076.446305] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2076.446496] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2076.447405] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7384e2-b54d-4ea3-bcc7-6583b0ae8023 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.455548] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6bb9d11-fc4c-4bc6-af2f-fb829e2f69b7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.469540] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a54deaa-7356-4a90-9b09-c333a12567f5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.476117] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a411e15b-b0a1-4e56-a575-c8295075a950 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.506202] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180954MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2076.506358] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2076.506576] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2076.879014] env[69227]: DEBUG nova.network.neutron [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2077.225786] env[69227]: DEBUG nova.network.neutron [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Updating instance_info_cache with network_info: [{"id": "fbaa7834-6e02-40b3-862e-ed78348d0e12", "address": "fa:16:3e:8a:75:e8", "network": {"id": "6474d409-6b66-4fde-b08d-00d5f7922675", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-736730889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732948237883495b892ab3b007d7905d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbaa7834-6e", "ovs_interfaceid": "fbaa7834-6e02-40b3-862e-ed78348d0e12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.538024] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538024] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538024] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538189] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538284] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538339] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538513] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.538661] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.539566] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.539566] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2077.539566] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2077.539566] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2077.555798] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing inventories for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2077.568097] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating ProviderTree inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2077.568378] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2077.579201] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing aggregate associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, aggregates: None {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2077.597023] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing trait associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2077.725633] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9f14a5-b4ca-4a87-8888-b1a10bff6b97 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.728582] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 
tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Releasing lock "refresh_cache-0b31dc0b-6a70-41aa-adbe-d989a002adca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2077.728883] env[69227]: DEBUG nova.compute.manager [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Instance network_info: |[{"id": "fbaa7834-6e02-40b3-862e-ed78348d0e12", "address": "fa:16:3e:8a:75:e8", "network": {"id": "6474d409-6b66-4fde-b08d-00d5f7922675", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-736730889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732948237883495b892ab3b007d7905d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbaa7834-6e", "ovs_interfaceid": "fbaa7834-6e02-40b3-862e-ed78348d0e12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 2077.729299] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:75:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2928baf1-3efb-4205-a786-d9783e51f699', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbaa7834-6e02-40b3-862e-ed78348d0e12', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2077.736660] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2077.737347] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2077.737581] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-036f9567-98d8-4364-acbd-e7c439e8c989 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.755249] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ce1e5a-465e-4c43-9242-c4849dc6d6f5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.760332] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2077.760332] env[69227]: value = "task-3475208" [ 2077.760332] env[69227]: _type = "Task" [ 2077.760332] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.791078] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27363e3-e18d-42fe-a92f-b5c2d0d2f058 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.794678] env[69227]: DEBUG nova.compute.manager [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Received event network-changed-fbaa7834-6e02-40b3-862e-ed78348d0e12 {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2077.794877] env[69227]: DEBUG nova.compute.manager [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Refreshing instance network info cache due to event network-changed-fbaa7834-6e02-40b3-862e-ed78348d0e12. {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 2077.795093] env[69227]: DEBUG oslo_concurrency.lockutils [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] Acquiring lock "refresh_cache-0b31dc0b-6a70-41aa-adbe-d989a002adca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.795237] env[69227]: DEBUG oslo_concurrency.lockutils [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] Acquired lock "refresh_cache-0b31dc0b-6a70-41aa-adbe-d989a002adca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2077.795399] env[69227]: DEBUG nova.network.neutron [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Refreshing network info cache for port fbaa7834-6e02-40b3-862e-ed78348d0e12 {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2077.800178] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475208, 'name': CreateVM_Task} progress is 15%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.806044] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34c69f78-2f9d-4ab8-852c-3e12b165f27d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.820926] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2078.270347] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475208, 'name': CreateVM_Task, 'duration_secs': 0.30148} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.270812] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2078.271275] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.271454] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2078.271812] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 2078.272085] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5ff72ca-5fa1-4e45-9bed-3daa3d8f179e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.276523] env[69227]: DEBUG oslo_vmware.api [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for the task: (returnval){ [ 2078.276523] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5280f65a-59b5-4394-cf65-a218e87e16ac" [ 2078.276523] env[69227]: _type = "Task" [ 2078.276523] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.284353] env[69227]: DEBUG oslo_vmware.api [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5280f65a-59b5-4394-cf65-a218e87e16ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.324437] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2078.509855] env[69227]: DEBUG nova.network.neutron [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Updated VIF entry in instance network info cache for port fbaa7834-6e02-40b3-862e-ed78348d0e12. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2078.510255] env[69227]: DEBUG nova.network.neutron [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Updating instance_info_cache with network_info: [{"id": "fbaa7834-6e02-40b3-862e-ed78348d0e12", "address": "fa:16:3e:8a:75:e8", "network": {"id": "6474d409-6b66-4fde-b08d-00d5f7922675", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-736730889-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "732948237883495b892ab3b007d7905d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2928baf1-3efb-4205-a786-d9783e51f699", "external-id": "nsx-vlan-transportzone-508", "segmentation_id": 508, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbaa7834-6e", "ovs_interfaceid": "fbaa7834-6e02-40b3-862e-ed78348d0e12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.787991] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2078.788224] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2078.788438] env[69227]: DEBUG oslo_concurrency.lockutils 
[None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.829800] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2078.829800] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.323s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2079.014557] env[69227]: DEBUG oslo_concurrency.lockutils [req-44605460-7fd2-40c2-8213-d1a4e548a029 req-e9f6b95d-869d-4a15-8b3d-cd8325e133d3 service nova] Releasing lock "refresh_cache-0b31dc0b-6a70-41aa-adbe-d989a002adca" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2079.818025] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.427061] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.929465] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2082.929735] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances with incomplete migration {{(pid=69227) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11281}} [ 2115.176858] env[69227]: WARNING oslo_vmware.rw_handles [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 
2115.176858] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2115.176858] env[69227]: ERROR oslo_vmware.rw_handles [ 2115.176858] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2115.178635] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2115.178874] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Copying Virtual Disk [datastore2] vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/dadc0331-52ea-4858-aefb-2e6f229dea39/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2115.179183] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ad1be93-1805-48ab-a9c5-09b39ade9a30 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.187331] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for the task: (returnval){ [ 2115.187331] env[69227]: value = "task-3475209" [ 2115.187331] env[69227]: _type = "Task" [ 2115.187331] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.194803] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': task-3475209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.697109] env[69227]: DEBUG oslo_vmware.exceptions [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2115.697392] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2115.697982] env[69227]: ERROR nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2115.697982] env[69227]: Faults: ['InvalidArgument'] [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Traceback (most recent call last): [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] yield resources [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self.driver.spawn(context, instance, image_meta, [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self._fetch_image_if_missing(context, vi) [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] image_cache(vi, tmp_image_ds_loc) [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] vm_util.copy_virtual_disk( [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] session._wait_for_task(vmdk_copy_task) [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] return self.wait_for_task(task_ref) [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] return evt.wait() [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] result = hub.switch() [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] return self.greenlet.switch() [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self.f(*self.args, **self.kw) [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] raise exceptions.translate_fault(task_info.error) [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Faults: ['InvalidArgument'] [ 2115.697982] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] [ 2115.698888] env[69227]: INFO nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Terminating instance [ 2115.699831] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2115.700057] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2115.700300] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1f02651a-be61-4a22-828f-ce739265811f 
{{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.702496] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2115.702688] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2115.703441] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e25b47-18bf-4265-b801-ea483e37f690 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.710485] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2115.710719] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae40f6df-9e16-41c6-8c2d-4cc86ee4dba0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.712888] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2115.713139] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2115.714017] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3fe56248-d3e1-4233-a5e4-c5162b707621 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.718631] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for the task: (returnval){ [ 2115.718631] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5258eeb3-11ed-743f-de39-be7a8edde9f2" [ 2115.718631] env[69227]: _type = "Task" [ 2115.718631] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.733144] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2115.733406] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Creating directory with path [datastore2] vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2115.733632] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eca6665e-c9a9-401e-b611-b887ebfde32b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.756203] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Created directory with path [datastore2] vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2115.756413] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Fetch image to [datastore2] vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2115.756579] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2115.757368] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b26def-c027-4a31-b1d4-144ea307979b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.764400] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9ed608-ce4e-4143-8422-e638179e9eae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.773307] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2877fcf-a8bd-4582-9985-712f6e98ae51 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.804741] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7d228123-66cb-4dd5-ba98-731b78e9f745 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.807014] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2115.807215] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2115.807384] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Deleting the datastore file [datastore2] 1c3529ac-4abf-46fe-8b40-1e4222e2150a {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2115.807595] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-89916d36-7bf7-420e-97cd-34dc610a2cf6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.812091] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-402df75b-4a75-4946-9705-cb88d16217d1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.814728] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for the task: (returnval){ [ 2115.814728] env[69227]: value = "task-3475211" [ 2115.814728] env[69227]: _type = "Task" [ 2115.814728] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2115.821748] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': task-3475211, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2115.841900] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2115.902225] env[69227]: DEBUG oslo_vmware.rw_handles [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2115.961936] env[69227]: DEBUG oslo_vmware.rw_handles [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2115.962154] env[69227]: DEBUG oslo_vmware.rw_handles [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2116.324765] env[69227]: DEBUG oslo_vmware.api [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': task-3475211, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075817} completed successfully. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2116.325171] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2116.325245] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2116.325431] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2116.325617] env[69227]: INFO nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Took 0.62 seconds to destroy the instance on the hypervisor. [ 2116.327684] env[69227]: DEBUG nova.compute.claims [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2116.327851] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2116.328106] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2116.952383] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f10a1be-ead5-40c8-ab0d-552b514b86aa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.959526] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c411a2c5-b0b2-432f-95eb-0440c20e60b2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.988944] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fcb18e-6243-400b-82d9-0cac2b7dd79b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.995466] env[69227]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b62bee5-bba1-468b-9fb9-f598e2d17ecb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.007902] env[69227]: DEBUG nova.compute.provider_tree [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2117.511941] env[69227]: DEBUG nova.scheduler.client.report [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2118.018083] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.690s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2118.018727] env[69227]: ERROR nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.018727] env[69227]: Faults: ['InvalidArgument'] [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Traceback (most recent call last): [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self.driver.spawn(context, instance, image_meta, [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self._fetch_image_if_missing(context, vi) [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] image_cache(vi, tmp_image_ds_loc) [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] vm_util.copy_virtual_disk( [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] session._wait_for_task(vmdk_copy_task) [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] return self.wait_for_task(task_ref) [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] return evt.wait() [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] result = hub.switch() [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] return self.greenlet.switch() [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] self.f(*self.args, **self.kw) [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] raise exceptions.translate_fault(task_info.error) [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Faults: ['InvalidArgument'] [ 2118.018727] env[69227]: ERROR nova.compute.manager [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] [ 2118.019694] env[69227]: DEBUG nova.compute.utils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 
1c3529ac-4abf-46fe-8b40-1e4222e2150a] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2118.021203] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Build of instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a was re-scheduled: A specified parameter was not correct: fileType [ 2118.021203] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2118.021580] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2118.021753] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2118.021920] env[69227]: DEBUG nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2118.022098] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2118.803987] env[69227]: DEBUG nova.network.neutron [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.306722] env[69227]: INFO nova.compute.manager [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Took 1.28 seconds to deallocate network for instance. 
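The traceback above reduces to one caller-side contract worth keeping in mind for the rest of this log: oslo.vmware polls the CopyVirtualDisk_Task and, once the task reports an error, translates the VIM fault into a VimFaultException whose fault list carries the names logged here (['InvalidArgument']), after which the compute manager re-schedules the build. A minimal sketch of that shape, where `session` and `vmdk_copy_task` are stand-ins for the objects built in nova.virt.vmwareapi.vm_util, not Nova's actual code:

from oslo_vmware import exceptions as vexc


def wait_for_vmdk_copy(session, vmdk_copy_task):
    # `session` stands in for the oslo_vmware.api.VMwareAPISession used by
    # the driver; `vmdk_copy_task` for the task reference returned by the
    # VirtualDiskManager.CopyVirtualDisk_Task call in the traceback above.
    try:
        # wait_for_task() polls the task (the _poll_task frames above) and
        # returns its info once the task reports success.
        return session.wait_for_task(vmdk_copy_task)
    except vexc.VimFaultException as exc:
        # A failed task is translated into VimFaultException; exc.fault_list
        # carries the raw VIM fault names, here ['InvalidArgument'], and the
        # message is "A specified parameter was not correct: fileType".
        # Nothing is retried at this level: the exception propagates up to
        # _build_and_run_instance and the build is re-scheduled, exactly as
        # the surrounding log entries show.
        raise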
[ 2120.340031] env[69227]: INFO nova.scheduler.client.report [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Deleted allocations for instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a [ 2120.850685] env[69227]: DEBUG oslo_concurrency.lockutils [None req-28d8de5a-2919-4a27-88c5-cb9ac7cb9730 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 572.226s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2120.850954] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 376.863s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2120.851228] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2120.851440] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2120.851607] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2120.854056] env[69227]: INFO nova.compute.manager [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Terminating instance [ 2120.855758] env[69227]: DEBUG nova.compute.manager [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2120.855953] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2120.856233] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d79fe807-7553-4853-99ca-97bf02ee515d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.864779] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7775650-5973-4dc2-bbf4-75989d041dfb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.893473] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1c3529ac-4abf-46fe-8b40-1e4222e2150a could not be found. [ 2120.893660] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2120.893841] env[69227]: INFO nova.compute.manager [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2120.894093] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2120.894317] env[69227]: DEBUG nova.compute.manager [-] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2120.894412] env[69227]: DEBUG nova.network.neutron [-] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2121.411657] env[69227]: DEBUG nova.network.neutron [-] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2121.916062] env[69227]: INFO nova.compute.manager [-] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] Took 1.02 seconds to deallocate network for instance. 
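The lock bookkeeping in the entries above ("Acquiring lock", "acquired ... :: waited 376.863s", "released ... :: held 2.091s") is oslo.concurrency's lockutils wrapper at work: terminate serializes on the instance UUID, so it queues behind the build that held the same lock name for 572s, and the backend destroy tolerates InstanceNotFound because the failed build never created a VM on vCenter. A minimal sketch of that shape, with the lock name and `destroy_on_backend` as hypothetical stand-ins rather than Nova's real entry points:

from oslo_concurrency import lockutils

from nova import exception


# Hypothetical helper; the real flow lives in ComputeManager and
# nova.virt.vmwareapi.vmops. This only mirrors the locking shape above.
@lockutils.synchronized('1c3529ac-4abf-46fe-8b40-1e4222e2150a')
def do_terminate_instance(destroy_on_backend):
    # Entering this function emits the "acquired ... :: waited Ns" DEBUG
    # line and returning emits "released ... :: held Ns"; a build still
    # holding the same lock name is what caused the 376.863s wait above.
    try:
        destroy_on_backend()
    except exception.InstanceNotFound:
        # The failed build never created the VM, so the backend lookup
        # raises InstanceNotFound, the WARNING above is logged, and
        # teardown continues straight to network deallocation.
        pass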
[ 2122.942344] env[69227]: DEBUG oslo_concurrency.lockutils [None req-49b70dae-d011-4861-a672-110744b68f7a tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.091s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2122.943225] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 334.322s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2122.943424] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 1c3529ac-4abf-46fe-8b40-1e4222e2150a] During sync_power_state the instance has a pending task (deleting). Skip. [ 2122.943601] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "1c3529ac-4abf-46fe-8b40-1e4222e2150a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2126.929611] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2128.427467] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.427481] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2130.427773] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2134.427637] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2134.427919] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2134.427919] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2134.933105] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933280] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933397] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933474] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933594] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933716] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933835] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.933982] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.934077] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2134.934193] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2134.934401] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.427225] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.427512] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.427629] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.427782] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2136.930985] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2136.931254] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2136.931451] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2136.931609] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2136.932479] env[69227]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1115caf8-0c4d-412b-aa42-8bae8dcda766 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.941797] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-683eecef-44aa-40a0-ad9c-326b9c783a9d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.955455] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb62915e-fe93-4f40-8851-7c6a20d67221 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.961967] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed35f85-60b9-4c83-8220-c5177238888e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.991515] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180969MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2136.991679] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2136.991905] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2137.392844] env[69227]: DEBUG oslo_concurrency.lockutils [None req-ca1329a0-acef-4dc7-b4cc-540da3b7a84b tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquiring lock "0b31dc0b-6a70-41aa-adbe-d989a002adca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2138.026651] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 53fae914-75b0-414e-b3ce-9d8be3462039 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.026972] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.026972] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.027086] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.027194] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.027312] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.027428] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.028200] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.028200] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2138.028200] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2138.028200] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2138.152761] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35bacb9-2ced-4ca5-9b83-50ade02afa58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.160413] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c8d959-a3ae-41d1-bcf7-e48e21f6d26e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.190139] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0738363f-8809-4ec9-a709-105a8534146b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.197429] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e94e285-20a2-4666-ae2d-1647fa902632 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.211591] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2138.714985] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2139.221394] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2139.221663] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.230s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2144.220516] env[69227]: DEBUG oslo_concurrency.lockutils [None 
req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2144.220782] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2144.724178] env[69227]: DEBUG nova.compute.manager [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 2145.246345] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2145.246621] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2145.248123] env[69227]: INFO nova.compute.claims [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2146.363776] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2bf8bc-4daf-4a9e-9320-d799ed5efc6e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.371281] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5842807-8a54-4438-80f2-bd3aa97d447c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.401877] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7650bc52-7fb5-44c7-8df6-c6ab1e7e466c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.408703] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30ea2cb-f621-4e8e-a2bc-3497a0b4a0fb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.421369] env[69227]: DEBUG nova.compute.provider_tree [None 
req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2146.924726] env[69227]: DEBUG nova.scheduler.client.report [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2147.429954] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.183s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2147.430436] env[69227]: DEBUG nova.compute.manager [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 2147.935293] env[69227]: DEBUG nova.compute.utils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2147.936669] env[69227]: DEBUG nova.compute.manager [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 2147.936839] env[69227]: DEBUG nova.network.neutron [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2147.971132] env[69227]: DEBUG nova.policy [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8e1869fbfffc40e0a5928efbdf5f2a02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00727f8cf5ba447f88abe6ab2c951aa7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 2148.235653] env[69227]: DEBUG nova.network.neutron [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Successfully created port: 6bf73a86-a061-4a5f-8ad2-782ba7b0476a {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2148.440620] env[69227]: DEBUG nova.compute.manager [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 2149.450494] env[69227]: DEBUG nova.compute.manager [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 2149.475154] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2149.475434] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2149.475596] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2149.475779] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2149.475926] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2149.476088] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2149.476297] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2149.476516] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2149.476613] env[69227]: DEBUG nova.virt.hardware [None 
req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2149.476778] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2149.476950] env[69227]: DEBUG nova.virt.hardware [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2149.477832] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44088f69-400d-4732-99a7-0aaac3190cd5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.485561] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ef2819-2fca-4ebc-bba7-cdee90267085 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.576649] env[69227]: DEBUG nova.compute.manager [req-720402e5-38c8-4aff-88e3-537d7547fb32 req-26e494f4-4024-4695-9f78-b914d04a5c8c service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Received event network-vif-plugged-6bf73a86-a061-4a5f-8ad2-782ba7b0476a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2149.576885] env[69227]: DEBUG oslo_concurrency.lockutils [req-720402e5-38c8-4aff-88e3-537d7547fb32 req-26e494f4-4024-4695-9f78-b914d04a5c8c service nova] Acquiring lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2149.577132] env[69227]: DEBUG oslo_concurrency.lockutils [req-720402e5-38c8-4aff-88e3-537d7547fb32 req-26e494f4-4024-4695-9f78-b914d04a5c8c service nova] Lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2149.577308] env[69227]: DEBUG oslo_concurrency.lockutils [req-720402e5-38c8-4aff-88e3-537d7547fb32 req-26e494f4-4024-4695-9f78-b914d04a5c8c service nova] Lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2149.577471] env[69227]: DEBUG nova.compute.manager [req-720402e5-38c8-4aff-88e3-537d7547fb32 req-26e494f4-4024-4695-9f78-b914d04a5c8c service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] No waiting events found dispatching network-vif-plugged-6bf73a86-a061-4a5f-8ad2-782ba7b0476a {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2149.577634] env[69227]: WARNING nova.compute.manager [req-720402e5-38c8-4aff-88e3-537d7547fb32 
req-26e494f4-4024-4695-9f78-b914d04a5c8c service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Received unexpected event network-vif-plugged-6bf73a86-a061-4a5f-8ad2-782ba7b0476a for instance with vm_state building and task_state spawning. [ 2149.654658] env[69227]: DEBUG nova.network.neutron [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Successfully updated port: 6bf73a86-a061-4a5f-8ad2-782ba7b0476a {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2150.157424] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "refresh_cache-2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2150.157588] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired lock "refresh_cache-2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2150.157740] env[69227]: DEBUG nova.network.neutron [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2150.689105] env[69227]: DEBUG nova.network.neutron [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2150.802822] env[69227]: DEBUG nova.network.neutron [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Updating instance_info_cache with network_info: [{"id": "6bf73a86-a061-4a5f-8ad2-782ba7b0476a", "address": "fa:16:3e:cc:9a:7b", "network": {"id": "04086ef4-95f1-4c4e-8b59-4954ddff44a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715257717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00727f8cf5ba447f88abe6ab2c951aa7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf73a86-a0", "ovs_interfaceid": "6bf73a86-a061-4a5f-8ad2-782ba7b0476a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2151.305858] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Releasing lock "refresh_cache-2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2151.306195] env[69227]: DEBUG nova.compute.manager [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Instance network_info: |[{"id": "6bf73a86-a061-4a5f-8ad2-782ba7b0476a", "address": "fa:16:3e:cc:9a:7b", "network": {"id": "04086ef4-95f1-4c4e-8b59-4954ddff44a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715257717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00727f8cf5ba447f88abe6ab2c951aa7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf73a86-a0", "ovs_interfaceid": "6bf73a86-a061-4a5f-8ad2-782ba7b0476a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 2151.306638] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:9a:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6365036-aa37-44d2-90d1-ca1c3516ded9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bf73a86-a061-4a5f-8ad2-782ba7b0476a', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2151.314141] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2151.314343] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2151.314564] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-36d161cb-59d2-4b53-b18e-de0024bb0e16 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.334525] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2151.334525] env[69227]: value = "task-3475212" [ 2151.334525] env[69227]: _type = "Task" [ 2151.334525] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.341868] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475212, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.603322] env[69227]: DEBUG nova.compute.manager [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Received event network-changed-6bf73a86-a061-4a5f-8ad2-782ba7b0476a {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2151.603524] env[69227]: DEBUG nova.compute.manager [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Refreshing instance network info cache due to event network-changed-6bf73a86-a061-4a5f-8ad2-782ba7b0476a. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 2151.603750] env[69227]: DEBUG oslo_concurrency.lockutils [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] Acquiring lock "refresh_cache-2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.603860] env[69227]: DEBUG oslo_concurrency.lockutils [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] Acquired lock "refresh_cache-2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2151.604228] env[69227]: DEBUG nova.network.neutron [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Refreshing network info cache for port 6bf73a86-a061-4a5f-8ad2-782ba7b0476a {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2151.844180] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475212, 'name': CreateVM_Task, 'duration_secs': 0.290389} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.844543] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2151.844974] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.845153] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2151.845459] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 2151.845702] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c708dd30-1667-415e-ba66-6c4a20e510f1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.849792] env[69227]: DEBUG oslo_vmware.api [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for the task: (returnval){ [ 2151.849792] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d3ee31-b8d3-3a67-b640-a77b0c397b05" [ 2151.849792] env[69227]: _type = "Task" [ 2151.849792] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.856871] env[69227]: DEBUG oslo_vmware.api [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52d3ee31-b8d3-3a67-b640-a77b0c397b05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2152.321685] env[69227]: DEBUG nova.network.neutron [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Updated VIF entry in instance network info cache for port 6bf73a86-a061-4a5f-8ad2-782ba7b0476a. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2152.322055] env[69227]: DEBUG nova.network.neutron [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Updating instance_info_cache with network_info: [{"id": "6bf73a86-a061-4a5f-8ad2-782ba7b0476a", "address": "fa:16:3e:cc:9a:7b", "network": {"id": "04086ef4-95f1-4c4e-8b59-4954ddff44a6", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1715257717-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00727f8cf5ba447f88abe6ab2c951aa7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bf73a86-a0", "ovs_interfaceid": "6bf73a86-a061-4a5f-8ad2-782ba7b0476a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.359708] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2152.359953] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2152.360177] env[69227]: DEBUG oslo_concurrency.lockutils [None req-6d1230c8-8eab-4ceb-a961-88a4edafc1ed tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "[datastore2] 
devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2152.824835] env[69227]: DEBUG oslo_concurrency.lockutils [req-0f006176-c87e-4f92-8ff3-66683dd3b60c req-c3a8ef10-6e88-4d5d-a869-bd9d0f2c3f49 service nova] Releasing lock "refresh_cache-2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2155.217209] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2164.664731] env[69227]: WARNING oslo_vmware.rw_handles [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2164.664731] env[69227]: ERROR oslo_vmware.rw_handles [ 2164.665450] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2164.667363] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2164.667621] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Copying Virtual Disk [datastore2] 
vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/1824fcd1-a227-4c83-96d8-4b7aaeda11f5/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2164.667911] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bd0ab97-9c3e-4ae3-9d65-09073c15a47f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2164.675319] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for the task: (returnval){ [ 2164.675319] env[69227]: value = "task-3475213" [ 2164.675319] env[69227]: _type = "Task" [ 2164.675319] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2164.682765] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Task: {'id': task-3475213, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.190311] env[69227]: DEBUG oslo_vmware.exceptions [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2165.190737] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2165.191668] env[69227]: ERROR nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2165.191668] env[69227]: Faults: ['InvalidArgument'] [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Traceback (most recent call last): [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] yield resources [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self.driver.spawn(context, instance, image_meta, [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self._fetch_image_if_missing(context, vi) [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] image_cache(vi, tmp_image_ds_loc) [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] vm_util.copy_virtual_disk( [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] session._wait_for_task(vmdk_copy_task) [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] return self.wait_for_task(task_ref) [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] return evt.wait() [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] result = hub.switch() [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] return self.greenlet.switch() [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self.f(*self.args, **self.kw) [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] raise exceptions.translate_fault(task_info.error) [ 
2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Faults: ['InvalidArgument'] [ 2165.191668] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] [ 2165.192964] env[69227]: INFO nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Terminating instance [ 2165.194298] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2165.194634] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2165.195333] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2165.195605] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquired lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2165.195892] env[69227]: DEBUG nova.network.neutron [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2165.197152] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1bd20b13-6e35-4ec6-841c-9ce984d9533d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.206884] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2165.207178] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2165.208227] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-370fab22-251a-415b-8188-b31a67b1d554 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.215348] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for the task: (returnval){ [ 2165.215348] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52726ea3-1208-951f-62fc-c134be3bdc26" [ 2165.215348] env[69227]: _type = "Task" [ 2165.215348] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2165.226324] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52726ea3-1208-951f-62fc-c134be3bdc26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2165.717156] env[69227]: DEBUG nova.network.neutron [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2165.727979] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2165.728243] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Creating directory with path [datastore2] vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2165.728468] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9fab6b8-c5f6-4f20-929c-1976568fa7bb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.747884] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Created directory with path [datastore2] vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2165.748130] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Fetch image to [datastore2] vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2165.748342] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2165.749127] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aa0a47d-35a4-45bd-8dbf-1226c55f55c5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.756213] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7679250-e048-499b-9908-420c751009cc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.765068] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4728a0fc-c432-42eb-ac7a-01a0867501b4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.769036] env[69227]: DEBUG nova.network.neutron [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.798180] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9246f4-d9ac-48c0-967b-f9c9b0b8fe78 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.803980] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fa627445-9ffe-4c3b-9f3d-ddece4e5d824 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.827245] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2165.872462] env[69227]: DEBUG oslo_vmware.rw_handles [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2165.932516] env[69227]: DEBUG oslo_vmware.rw_handles [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2165.932711] env[69227]: DEBUG oslo_vmware.rw_handles [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2166.271437] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Releasing lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2166.271910] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2166.272204] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2166.273081] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5340bb0c-998c-48e5-b2d4-ea3bd166dbd5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.280784] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2166.281019] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c1ffcfd-007b-435f-a7bc-fcb38ed2d6a2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.309518] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2166.309736] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] 
[instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2166.309885] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Deleting the datastore file [datastore2] 53fae914-75b0-414e-b3ce-9d8be3462039 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2166.310143] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d04c0308-12e0-4ce3-b5ad-08102571d829 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.316695] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for the task: (returnval){ [ 2166.316695] env[69227]: value = "task-3475215" [ 2166.316695] env[69227]: _type = "Task" [ 2166.316695] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.324151] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Task: {'id': task-3475215, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.826080] env[69227]: DEBUG oslo_vmware.api [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Task: {'id': task-3475215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031559} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.826370] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2166.826511] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2166.826677] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2166.826849] env[69227]: INFO nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Took 0.55 seconds to destroy the instance on the hypervisor. 
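
The task entries in this section all follow the same shape: a vSphere task is created (CreateVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task, DeleteDatastoreFile_Task), oslo_vmware.api waits for it while _poll_task logs its progress, and the task either completes successfully or its error is translated into an exception such as the VimFaultException ("A specified parameter was not correct: fileType") raised in the traceback above. The following is a minimal, hypothetical Python sketch of that polling pattern, not oslo.vmware's actual implementation; TaskInfo, fetch_task_info, TaskFailed and the poll interval are illustrative stand-ins.

    # Illustrative sketch only: approximates the wait_for_task/_poll_task
    # behaviour visible in the log lines above. Names here are hypothetical.
    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        state: str                  # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        error: Optional[str] = None

    class TaskFailed(Exception):
        """Raised when the backend reports the task ended in an error state."""

    def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                      poll_interval: float = 0.5) -> TaskInfo:
        """Poll a task until it reaches a terminal state.

        While the task is queued/running this mirrors the "progress is 0%"
        DEBUG lines; on success it returns (the "completed successfully"
        lines); on error it raises, analogous to translate_fault() turning
        task_info.error into the InvalidArgument/fileType fault seen above.
        """
        while True:
            info = fetch_task_info()
            if info.state in ('queued', 'running'):
                time.sleep(poll_interval)   # back off before polling again
                continue
            if info.state == 'success':
                return info
            raise TaskFailed(info.error or 'unknown task error')

    # Example usage with a stubbed task that is already finished:
    #   wait_for_task(lambda: TaskInfo(state='success', progress=100))
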
[ 2166.827091] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2166.827297] env[69227]: DEBUG nova.compute.manager [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network deallocation for instance since networking was not requested. {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2166.829301] env[69227]: DEBUG nova.compute.claims [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2166.829471] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2166.829708] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2167.447848] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7348729d-c87a-4b4b-9409-8da8336db581 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.455532] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba2caac-25b4-4bd7-8b9b-8a0fbca822e2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.485054] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e80ff24c-ec08-458f-81cc-64f3352afbd8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.492453] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8c771a-7720-4404-8b9f-20aabe706651 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.506047] env[69227]: DEBUG nova.compute.provider_tree [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.008753] env[69227]: DEBUG nova.scheduler.client.report [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 
tempest-ServersListShow296Test-1428636893-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2168.514055] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.684s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2168.514325] env[69227]: ERROR nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.514325] env[69227]: Faults: ['InvalidArgument'] [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Traceback (most recent call last): [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self.driver.spawn(context, instance, image_meta, [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self._fetch_image_if_missing(context, vi) [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] image_cache(vi, tmp_image_ds_loc) [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] vm_util.copy_virtual_disk( [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] 
session._wait_for_task(vmdk_copy_task) [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] return self.wait_for_task(task_ref) [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] return evt.wait() [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] result = hub.switch() [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] return self.greenlet.switch() [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] self.f(*self.args, **self.kw) [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] raise exceptions.translate_fault(task_info.error) [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Faults: ['InvalidArgument'] [ 2168.514325] env[69227]: ERROR nova.compute.manager [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] [ 2168.515102] env[69227]: DEBUG nova.compute.utils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2168.516703] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Build of instance 53fae914-75b0-414e-b3ce-9d8be3462039 was re-scheduled: A specified parameter was not correct: fileType [ 2168.516703] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2168.517100] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] 
[instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2168.517324] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2168.517472] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquired lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2168.517626] env[69227]: DEBUG nova.network.neutron [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2169.034963] env[69227]: DEBUG nova.network.neutron [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2169.077751] env[69227]: DEBUG nova.network.neutron [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2169.580785] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Releasing lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2169.581061] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2169.581260] env[69227]: DEBUG nova.compute.manager [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2170.615167] env[69227]: INFO nova.scheduler.client.report [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Deleted allocations for instance 53fae914-75b0-414e-b3ce-9d8be3462039 [ 2171.123081] env[69227]: DEBUG oslo_concurrency.lockutils [None req-92ee2a4b-19f0-4cac-bbec-9cb566a1a409 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "53fae914-75b0-414e-b3ce-9d8be3462039" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 562.240s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2171.123373] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "53fae914-75b0-414e-b3ce-9d8be3462039" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 382.502s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2171.123572] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] During sync_power_state the instance has a pending task (spawning). Skip. [ 2171.123780] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "53fae914-75b0-414e-b3ce-9d8be3462039" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2171.124047] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "53fae914-75b0-414e-b3ce-9d8be3462039" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 366.010s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2171.124261] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "53fae914-75b0-414e-b3ce-9d8be3462039-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2171.124472] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "53fae914-75b0-414e-b3ce-9d8be3462039-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2171.124634] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "53fae914-75b0-414e-b3ce-9d8be3462039-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2171.129918] env[69227]: INFO nova.compute.manager [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Terminating instance [ 2171.131561] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquiring lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2171.131937] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Acquired lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2171.131937] env[69227]: DEBUG nova.network.neutron [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2171.651067] env[69227]: DEBUG nova.network.neutron [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2171.696243] env[69227]: DEBUG nova.network.neutron [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2172.198603] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Releasing lock "refresh_cache-53fae914-75b0-414e-b3ce-9d8be3462039" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2172.198997] env[69227]: DEBUG nova.compute.manager [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2172.199211] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2172.199520] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7870ee75-78eb-4af3-8fef-83428b093787 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.208376] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e960ea7-0ffd-479d-b813-c7bfe40d31b3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.236653] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 53fae914-75b0-414e-b3ce-9d8be3462039 could not be found. [ 2172.236851] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2172.237041] env[69227]: INFO nova.compute.manager [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2172.237289] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2172.237512] env[69227]: DEBUG nova.compute.manager [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2172.237605] env[69227]: DEBUG nova.network.neutron [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2172.252065] env[69227]: DEBUG nova.network.neutron [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2172.754744] env[69227]: DEBUG nova.network.neutron [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.258024] env[69227]: INFO nova.compute.manager [-] [instance: 53fae914-75b0-414e-b3ce-9d8be3462039] Took 1.02 seconds to deallocate network for instance. [ 2174.284352] env[69227]: DEBUG oslo_concurrency.lockutils [None req-8d77d299-3198-4b1b-b6d0-2237eb572820 tempest-ServersListShow296Test-1428636893 tempest-ServersListShow296Test-1428636893-project-member] Lock "53fae914-75b0-414e-b3ce-9d8be3462039" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.160s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2187.426621] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2190.427051] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2192.426921] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2192.427337] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2194.428586] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.426646] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.426985] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2196.426985] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2196.931841] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932042] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932170] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932306] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932427] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932546] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932666] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932782] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.932896] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2196.933039] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2196.933247] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2196.933419] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.427598] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2197.930623] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2197.930872] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2197.931090] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2197.931292] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2197.932263] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a2c8fc4-b35a-418a-8ee6-2be3b7b913bc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.941159] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d2b8ec-dec1-4e9d-a869-f9a797d4226d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.956706] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5feefa9b-d2b2-4d1d-92d8-28f951c20485 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.963673] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5017517-532d-4398-988f-c0bf62f24cc2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.994116] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None 
None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180965MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2197.994343] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2197.994561] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2199.025097] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 9944282c-d21a-40b2-9143-f76c288860ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025453] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025453] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025559] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025612] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025712] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025828] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.025941] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.026069] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2199.026274] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2199.026418] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2199.139750] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068729ee-eba4-42ba-8e85-0f5f73575d3d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.147417] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d1fc25-781d-4cfe-849d-282e1cae0df6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.177067] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810bc4c1-9d47-47eb-9f88-5862752cb997 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.184456] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f9854a-31ee-49a5-9023-cb11b9835778 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.198187] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2199.701874] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 
30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2200.206766] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2200.207174] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.212s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2201.202612] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2211.344732] env[69227]: WARNING oslo_vmware.rw_handles [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2211.344732] env[69227]: ERROR oslo_vmware.rw_handles [ 2211.345472] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2211.347302] env[69227]: DEBUG nova.virt.vmwareapi.vmops 
[None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2211.347545] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Copying Virtual Disk [datastore2] vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/f5601025-57c3-4c7e-9e8a-8440ea975d99/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2211.347823] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9332db38-d9dc-4885-995d-edcf452272bf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.355620] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for the task: (returnval){ [ 2211.355620] env[69227]: value = "task-3475216" [ 2211.355620] env[69227]: _type = "Task" [ 2211.355620] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.363753] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Task: {'id': task-3475216, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.866207] env[69227]: DEBUG oslo_vmware.exceptions [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2211.866488] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2211.867077] env[69227]: ERROR nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2211.867077] env[69227]: Faults: ['InvalidArgument'] [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Traceback (most recent call last): [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] yield resources [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self.driver.spawn(context, instance, image_meta, [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._fetch_image_if_missing(context, vi) [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] image_cache(vi, tmp_image_ds_loc) [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] vm_util.copy_virtual_disk( [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] session._wait_for_task(vmdk_copy_task) [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.wait_for_task(task_ref) [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return evt.wait() [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] result = hub.switch() [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.greenlet.switch() [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self.f(*self.args, **self.kw) [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] raise exceptions.translate_fault(task_info.error) [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Faults: ['InvalidArgument'] [ 2211.867077] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] [ 2211.868022] env[69227]: INFO nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Terminating instance [ 2211.868871] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2211.869096] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2211.869343] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b18ab1b-73c4-486b-8fb0-daa09d4141f1 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.871474] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2211.871632] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquired lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2211.871801] env[69227]: DEBUG nova.network.neutron [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2211.878881] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2211.879074] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2211.879765] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c6630f3-50d6-4778-a0c3-2a7e4908b610 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.885453] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for the task: (returnval){ [ 2211.885453] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]527718c2-0ba1-f64d-79f5-1e992b8ae24e" [ 2211.885453] env[69227]: _type = "Task" [ 2211.885453] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.892942] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]527718c2-0ba1-f64d-79f5-1e992b8ae24e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.395590] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2212.395890] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Creating directory with path [datastore2] vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2212.396085] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5bceffd5-b22d-4044-833f-ed1aaa186a97 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.415934] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Created directory with path [datastore2] vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2212.416165] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Fetch image to [datastore2] vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2212.416342] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2212.417116] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e60213-5354-4d2b-a850-735800c18a61 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.423974] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97f7230-3246-4221-bfc3-a286a2fd9c74 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.432877] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2ddd916-7c26-4311-9be0-5cab7dfdc9a5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.464378] env[69227]: DEBUG nova.network.neutron [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 
tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2212.466547] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd619e30-fd43-4044-9d1c-54f0c7e512bc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.471776] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3c4c30cc-3564-4bc0-ade7-657379121c1e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.492233] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2212.517539] env[69227]: DEBUG nova.network.neutron [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2212.545563] env[69227]: DEBUG oslo_vmware.rw_handles [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2212.606882] env[69227]: DEBUG oslo_vmware.rw_handles [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2212.607107] env[69227]: DEBUG oslo_vmware.rw_handles [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2213.023168] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Releasing lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2213.023736] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2213.023975] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2213.025017] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e4ff43-2555-481d-9cef-e5f98296936a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.032853] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2213.033108] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20101249-6014-43b8-a22a-38b679958c8c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.061270] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2213.061472] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2213.061650] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Deleting the datastore file [datastore2] 9944282c-d21a-40b2-9143-f76c288860ef {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2213.061905] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f3283236-bce6-4b55-9ba6-809b6bb95071 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.067391] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c 
tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for the task: (returnval){ [ 2213.067391] env[69227]: value = "task-3475218" [ 2213.067391] env[69227]: _type = "Task" [ 2213.067391] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2213.074639] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Task: {'id': task-3475218, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.577370] env[69227]: DEBUG oslo_vmware.api [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Task: {'id': task-3475218, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.029523} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2213.577731] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2213.577862] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2213.578056] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2213.578232] env[69227]: INFO nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Took 0.55 seconds to destroy the instance on the hypervisor. [ 2213.578469] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2213.578669] env[69227]: DEBUG nova.compute.manager [-] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2213.580840] env[69227]: DEBUG nova.compute.claims [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2213.581021] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2213.581258] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2214.193041] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876c8d6b-e2f9-4b76-8557-ad53e9708ca0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.200482] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d111148-ab60-4634-9bae-49370a47929b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.230712] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb18f9e-6051-4fd1-9008-6aaf245e5d57 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.237614] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32456c02-71d5-4695-bfa7-f3fc4da81391 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.250344] env[69227]: DEBUG nova.compute.provider_tree [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2214.753186] env[69227]: DEBUG nova.scheduler.client.report [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2215.258464] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c 
tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.677s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2215.259087] env[69227]: ERROR nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2215.259087] env[69227]: Faults: ['InvalidArgument'] [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Traceback (most recent call last): [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self.driver.spawn(context, instance, image_meta, [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._fetch_image_if_missing(context, vi) [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] image_cache(vi, tmp_image_ds_loc) [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] vm_util.copy_virtual_disk( [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] session._wait_for_task(vmdk_copy_task) [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.wait_for_task(task_ref) [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return evt.wait() [ 2215.259087] env[69227]: ERROR 
nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] result = hub.switch() [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.greenlet.switch() [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self.f(*self.args, **self.kw) [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] raise exceptions.translate_fault(task_info.error) [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Faults: ['InvalidArgument'] [ 2215.259087] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] [ 2215.260096] env[69227]: DEBUG nova.compute.utils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2215.261566] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Build of instance 9944282c-d21a-40b2-9143-f76c288860ef was re-scheduled: A specified parameter was not correct: fileType [ 2215.261566] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2215.261933] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2215.262171] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2215.262364] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquired lock 
"refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2215.262531] env[69227]: DEBUG nova.network.neutron [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2215.780333] env[69227]: DEBUG nova.network.neutron [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2215.822155] env[69227]: DEBUG nova.network.neutron [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.324477] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Releasing lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2216.325083] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2216.325083] env[69227]: DEBUG nova.compute.manager [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2217.351354] env[69227]: INFO nova.scheduler.client.report [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Deleted allocations for instance 9944282c-d21a-40b2-9143-f76c288860ef [ 2217.860289] env[69227]: DEBUG oslo_concurrency.lockutils [None req-697c2fe5-b4ce-4423-ac8a-8217e121492c tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "9944282c-d21a-40b2-9143-f76c288860ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 605.256s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2217.860545] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "9944282c-d21a-40b2-9143-f76c288860ef" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 429.239s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2217.860696] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] During sync_power_state the instance has a pending task (spawning). Skip. [ 2217.860913] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "9944282c-d21a-40b2-9143-f76c288860ef" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2217.861173] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "9944282c-d21a-40b2-9143-f76c288860ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 409.494s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2217.861389] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "9944282c-d21a-40b2-9143-f76c288860ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2217.861599] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "9944282c-d21a-40b2-9143-f76c288860ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2217.861764] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "9944282c-d21a-40b2-9143-f76c288860ef-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2217.863851] env[69227]: INFO nova.compute.manager [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Terminating instance [ 2217.865419] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquiring lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2217.865577] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Acquired lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2217.865741] env[69227]: DEBUG nova.network.neutron [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2218.385460] env[69227]: DEBUG nova.network.neutron [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2218.439247] env[69227]: DEBUG nova.network.neutron [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2218.941970] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Releasing lock "refresh_cache-9944282c-d21a-40b2-9143-f76c288860ef" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2218.942459] env[69227]: DEBUG nova.compute.manager [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2218.942673] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2218.943386] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dee2b15e-0aa0-4fb9-b6db-27ab006733f4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.952213] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35f14c0-4811-45c2-9077-bf08fb36d72b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.980189] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9944282c-d21a-40b2-9143-f76c288860ef could not be found. [ 2218.980409] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2218.980612] env[69227]: INFO nova.compute.manager [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2218.980851] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2218.981086] env[69227]: DEBUG nova.compute.manager [-] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2218.981189] env[69227]: DEBUG nova.network.neutron [-] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2219.081367] env[69227]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2219.081617] env[69227]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [-] Dynamic interval looping call 'oslo_service.backend.eventlet.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall exception_handler_v20(status_code, error_body) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise client_exc(message=error_message, [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Neutron server returns request_ids: ['req-7fb2b793-fd1e-473c-ad66-a1a767cba335'] [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall During handling of the above exception, another exception occurred: [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 2219.082174] env[69227]: ERROR 
oslo.service.backend.eventlet.loopingcall result = func(*self.args, **self.kw) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = f(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._deallocate_network( [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self.network_api.deallocate_for_instance( [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall data = neutron.list_ports(**search_opts) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall for r in self._pagination(collection, path, **params): [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall res = self.get(path, params=params) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.retry_request("GET", action, body=body, [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.do_request(method, action, body=body, [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2219.082174] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2219.083542] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2219.083542] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2219.083542] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2219.083542] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 2219.083542] env[69227]: ERROR nova.compute.manager [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2219.587713] env[69227]: ERROR nova.compute.manager [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Traceback (most recent call last): [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] ret = obj(*args, **kwargs) [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] exception_handler_v20(status_code, error_body) [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] raise client_exc(message=error_message, [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Neutron server returns request_ids: ['req-7fb2b793-fd1e-473c-ad66-a1a767cba335'] [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] During handling of the above exception, another exception occurred: [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Traceback (most recent call last): [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._delete_instance(context, instance, bdms) [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._shutdown_instance(context, instance, bdms) [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._try_deallocate_network(context, instance, requested_networks) [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] with excutils.save_and_reraise_exception(): [ 2219.587713] env[69227]: ERROR 
nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self.force_reraise() [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] raise self.value [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] _deallocate_network_with_retries() [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return evt.wait() [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] result = hub.switch() [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.greenlet.switch() [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] result = func(*self.args, **self.kw) [ 2219.587713] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] result = f(*args, **kwargs) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._deallocate_network( [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self.network_api.deallocate_for_instance( [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2219.589099] env[69227]: 
ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] data = neutron.list_ports(**search_opts) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] ret = obj(*args, **kwargs) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.list('ports', self.ports_path, retrieve_all, [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] ret = obj(*args, **kwargs) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] for r in self._pagination(collection, path, **params): [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] res = self.get(path, params=params) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] ret = obj(*args, **kwargs) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.retry_request("GET", action, body=body, [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] ret = obj(*args, **kwargs) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] return self.do_request(method, action, body=body, [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] ret = obj(*args, **kwargs) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 
9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] self._handle_fault_response(status_code, replybody, resp) [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2219.589099] env[69227]: ERROR nova.compute.manager [instance: 9944282c-d21a-40b2-9143-f76c288860ef] [ 2220.092091] env[69227]: DEBUG oslo_concurrency.lockutils [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Lock "9944282c-d21a-40b2-9143-f76c288860ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.231s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2221.602013] env[69227]: INFO nova.compute.manager [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] [instance: 9944282c-d21a-40b2-9143-f76c288860ef] Successfully reverted task state from None on failure for instance. [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server [None req-06720bd3-8058-4b2a-abe6-f0f80ac9039e tempest-ServerShowV257Test-995342813 tempest-ServerShowV257Test-995342813-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-7fb2b793-fd1e-473c-ad66-a1a767cba335'] [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 2221.605786] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in 
wrapper [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2221.607442] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2221.608823] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2221.608823] env[69227]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2221.608823] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2221.608823] env[69227]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2221.608823] env[69227]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
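Both re-logged dumps end in the same NeutronAdminCredentialConfigurationInvalid, so the delete path leaves the instance in ERROR even though the guest was already absent on the vCenter side (the earlier InstanceNotFound warning). The log's own hint ("please verify Neutron admin credential located in nova.conf") points at the service credentials Nova uses when talking to Neutron. A hedged way to check those credentials directly against Keystone, assuming keystoneauth1 is available and substituting hypothetical placeholders for whatever the [neutron] section of nova.conf actually contains:

    # Sketch only: verify the service credentials Nova uses for Neutron.
    # The values below are hypothetical; fill them in from the [neutron]
    # section of nova.conf on the failing compute node.
    from keystoneauth1 import exceptions as ks_exc
    from keystoneauth1 import session
    from keystoneauth1.identity import v3

    auth = v3.Password(
        auth_url="http://keystone.example.test/identity/v3",  # hypothetical endpoint
        username="neutron-service-user",                      # [neutron]/username
        password="service-password",                          # [neutron]/password
        project_name="service",                               # [neutron]/project_name
        user_domain_name="Default",                           # [neutron]/user_domain_name
        project_domain_name="Default",                        # [neutron]/project_domain_name
    )

    sess = session.Session(auth=auth)
    try:
        # A 401 here reproduces the Unauthorized the deallocation hit above.
        print("token issued:", bool(sess.get_token()))
    except ks_exc.Unauthorized:
        print("Keystone rejected the credentials (HTTP 401), same failure as the log")

If the token request fails here as well, the problem is the credentials or the Keystone endpoint rather than the deallocation logic; the retrying looping call seen above cannot succeed until that is corrected.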
[ 2221.608823] env[69227]: ERROR oslo_messaging.rpc.server [ 2225.111243] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "247d620f-a6ed-4b40-9cc5-269c6f99c487" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2225.111557] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "247d620f-a6ed-4b40-9cc5-269c6f99c487" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2225.613886] env[69227]: DEBUG nova.compute.manager [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Starting instance... {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 2226.135724] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2226.135995] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2226.137404] env[69227]: INFO nova.compute.claims [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2227.256956] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb1a7f2-e7a9-491d-9e38-0ba46f3ea376 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.264610] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b62d94c-1556-4667-aed0-4831874c99ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.294426] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed497dca-9986-4a01-90ab-6a11c082e0c1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.301632] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a55b88f4-11a1-4a71-9813-bacdf2107208 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.314728] 
env[69227]: DEBUG nova.compute.provider_tree [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2227.818309] env[69227]: DEBUG nova.scheduler.client.report [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2228.323455] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.187s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2228.324044] env[69227]: DEBUG nova.compute.manager [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Start building networks asynchronously for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 2228.828701] env[69227]: DEBUG nova.compute.utils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Using /dev/sd instead of None {{(pid=69227) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2228.830075] env[69227]: DEBUG nova.compute.manager [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Allocating IP information in the background. 
{{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 2228.830253] env[69227]: DEBUG nova.network.neutron [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] allocate_for_instance() {{(pid=69227) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2228.865946] env[69227]: DEBUG nova.policy [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd4163297ae024487943a604b9fd2a71f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52dd89399a014fbea28c0afc4d6da8f5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69227) authorize /opt/stack/nova/nova/policy.py:203}} [ 2229.103716] env[69227]: DEBUG nova.network.neutron [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Successfully created port: 313ebea7-0f16-4d3e-a65f-886b04c1b56d {{(pid=69227) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2229.333256] env[69227]: DEBUG nova.compute.manager [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Start building block device mappings for instance. {{(pid=69227) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 2230.344519] env[69227]: DEBUG nova.compute.manager [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Start spawning the instance on the hypervisor. 
{{(pid=69227) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 2230.369213] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-17T12:15:32Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-17T12:15:13Z,direct_url=,disk_format='vmdk',id=78c61090-3613-43e2-b8eb-045dfd47af0c,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='f9a97342f8234df5a8f3fca89b9f407b',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-17T12:15:14Z,virtual_size=,visibility=), allow threads: False {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2230.369470] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Flavor limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2230.369624] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Image limits 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2230.369804] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Flavor pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2230.369947] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Image pref 0:0:0 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2230.370179] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69227) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2230.370401] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2230.370582] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2230.370764] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 
tempest-ServersTestJSON-1746052672-project-member] Got 1 possible topologies {{(pid=69227) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2230.370924] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2230.371108] env[69227]: DEBUG nova.virt.hardware [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69227) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2230.371972] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c977cf53-fc28-443a-bb46-4512c25ee53b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.380144] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de0a1b56-e2e3-499d-b173-5ba6de9c4f92 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.452465] env[69227]: DEBUG nova.compute.manager [req-fe1fa11f-fded-4564-9400-7c53f2a84b8e req-1466f0bb-1191-4d79-8aef-4f155c2cc098 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Received event network-vif-plugged-313ebea7-0f16-4d3e-a65f-886b04c1b56d {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2230.452465] env[69227]: DEBUG oslo_concurrency.lockutils [req-fe1fa11f-fded-4564-9400-7c53f2a84b8e req-1466f0bb-1191-4d79-8aef-4f155c2cc098 service nova] Acquiring lock "247d620f-a6ed-4b40-9cc5-269c6f99c487-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2230.452465] env[69227]: DEBUG oslo_concurrency.lockutils [req-fe1fa11f-fded-4564-9400-7c53f2a84b8e req-1466f0bb-1191-4d79-8aef-4f155c2cc098 service nova] Lock "247d620f-a6ed-4b40-9cc5-269c6f99c487-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2230.452465] env[69227]: DEBUG oslo_concurrency.lockutils [req-fe1fa11f-fded-4564-9400-7c53f2a84b8e req-1466f0bb-1191-4d79-8aef-4f155c2cc098 service nova] Lock "247d620f-a6ed-4b40-9cc5-269c6f99c487-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2230.452465] env[69227]: DEBUG nova.compute.manager [req-fe1fa11f-fded-4564-9400-7c53f2a84b8e req-1466f0bb-1191-4d79-8aef-4f155c2cc098 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] No waiting events found dispatching network-vif-plugged-313ebea7-0f16-4d3e-a65f-886b04c1b56d {{(pid=69227) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2230.452465] env[69227]: WARNING nova.compute.manager [req-fe1fa11f-fded-4564-9400-7c53f2a84b8e req-1466f0bb-1191-4d79-8aef-4f155c2cc098 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] 
Received unexpected event network-vif-plugged-313ebea7-0f16-4d3e-a65f-886b04c1b56d for instance with vm_state building and task_state spawning. [ 2230.569115] env[69227]: DEBUG nova.network.neutron [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Successfully updated port: 313ebea7-0f16-4d3e-a65f-886b04c1b56d {{(pid=69227) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2231.072213] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "refresh_cache-247d620f-a6ed-4b40-9cc5-269c6f99c487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2231.072375] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "refresh_cache-247d620f-a6ed-4b40-9cc5-269c6f99c487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2231.072629] env[69227]: DEBUG nova.network.neutron [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2231.602246] env[69227]: DEBUG nova.network.neutron [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Instance cache missing network info. 
{{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2231.751377] env[69227]: DEBUG nova.network.neutron [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Updating instance_info_cache with network_info: [{"id": "313ebea7-0f16-4d3e-a65f-886b04c1b56d", "address": "fa:16:3e:21:a8:ba", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap313ebea7-0f", "ovs_interfaceid": "313ebea7-0f16-4d3e-a65f-886b04c1b56d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2232.254323] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "refresh_cache-247d620f-a6ed-4b40-9cc5-269c6f99c487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2232.254655] env[69227]: DEBUG nova.compute.manager [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Instance network_info: |[{"id": "313ebea7-0f16-4d3e-a65f-886b04c1b56d", "address": "fa:16:3e:21:a8:ba", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap313ebea7-0f", "ovs_interfaceid": "313ebea7-0f16-4d3e-a65f-886b04c1b56d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69227) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 2232.255113] env[69227]: 
DEBUG nova.virt.vmwareapi.vmops [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:a8:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '313ebea7-0f16-4d3e-a65f-886b04c1b56d', 'vif_model': 'vmxnet3'}] {{(pid=69227) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2232.262455] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2232.262685] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Creating VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2232.262905] env[69227]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9958b4f7-ac1e-4104-b906-808cb92f55d0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.282534] env[69227]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2232.282534] env[69227]: value = "task-3475219" [ 2232.282534] env[69227]: _type = "Task" [ 2232.282534] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.290404] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475219, 'name': CreateVM_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.483185] env[69227]: DEBUG nova.compute.manager [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Received event network-changed-313ebea7-0f16-4d3e-a65f-886b04c1b56d {{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11146}} [ 2232.483388] env[69227]: DEBUG nova.compute.manager [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Refreshing instance network info cache due to event network-changed-313ebea7-0f16-4d3e-a65f-886b04c1b56d. 
{{(pid=69227) external_instance_event /opt/stack/nova/nova/compute/manager.py:11151}} [ 2232.483657] env[69227]: DEBUG oslo_concurrency.lockutils [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] Acquiring lock "refresh_cache-247d620f-a6ed-4b40-9cc5-269c6f99c487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2232.483870] env[69227]: DEBUG oslo_concurrency.lockutils [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] Acquired lock "refresh_cache-247d620f-a6ed-4b40-9cc5-269c6f99c487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2232.483953] env[69227]: DEBUG nova.network.neutron [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Refreshing network info cache for port 313ebea7-0f16-4d3e-a65f-886b04c1b56d {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2232.792686] env[69227]: DEBUG oslo_vmware.api [-] Task: {'id': task-3475219, 'name': CreateVM_Task, 'duration_secs': 0.290852} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.792686] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Created VM on the ESX host {{(pid=69227) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2232.793192] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2232.793367] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2232.793694] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 2232.793940] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-132dc073-5add-4bbb-9e38-f07ac275f667 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.798128] env[69227]: DEBUG oslo_vmware.api [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2232.798128] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52e8cfc8-1e15-5b14-c212-fa2510c6c640" [ 2232.798128] env[69227]: _type = "Task" [ 2232.798128] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.805363] env[69227]: DEBUG oslo_vmware.api [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52e8cfc8-1e15-5b14-c212-fa2510c6c640, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.157418] env[69227]: DEBUG nova.network.neutron [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Updated VIF entry in instance network info cache for port 313ebea7-0f16-4d3e-a65f-886b04c1b56d. {{(pid=69227) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2233.157775] env[69227]: DEBUG nova.network.neutron [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Updating instance_info_cache with network_info: [{"id": "313ebea7-0f16-4d3e-a65f-886b04c1b56d", "address": "fa:16:3e:21:a8:ba", "network": {"id": "d6c1f0ab-deb1-4805-a723-43b4d8ccbbc2", "bridge": "br-int", "label": "tempest-ServersTestJSON-1694790668-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52dd89399a014fbea28c0afc4d6da8f5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c4810e0b-c5e1-43ca-8d35-de29f7ebe7b0", "external-id": "nsx-vlan-transportzone-60", "segmentation_id": 60, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap313ebea7-0f", "ovs_interfaceid": "313ebea7-0f16-4d3e-a65f-886b04c1b56d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2233.307887] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2233.308138] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Processing image 78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2233.308325] env[69227]: DEBUG oslo_concurrency.lockutils [None req-167a0abe-7f13-4c55-826b-269ae8a8e353 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2233.660557] env[69227]: DEBUG oslo_concurrency.lockutils [req-0aec7198-482f-4a2b-9292-65f388484086 req-0838db06-4841-4f24-be51-d76cdd80a2a5 service nova] Releasing lock "refresh_cache-247d620f-a6ed-4b40-9cc5-269c6f99c487" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2248.427386] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2250.427614] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.427823] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2252.428128] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2256.427637] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2256.428095] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2257.428063] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.422913] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.426761] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2258.427020] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2258.427228] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2258.932264] env[69227]: DEBUG 
nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.932651] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.932651] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.932731] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.932891] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.932966] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.933087] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.933210] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.933327] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2258.933444] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2258.933673] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2259.437092] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2259.437231] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2259.437344] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2259.437499] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2259.438381] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a08b5897-1b4d-4572-a928-3d0487b8b793 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.446629] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368025d2-0447-4272-89da-4fbd52e3ac22 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.459577] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28557ed-cddd-442d-9177-665637ccdc81 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.465866] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ea0ea5-b4c9-4fac-901d-19f9b1d93b1c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.494610] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180971MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2259.494757] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
2259.494955] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2260.524791] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525064] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525134] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525210] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525332] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525492] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525616] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525733] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.525846] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 247d620f-a6ed-4b40-9cc5-269c6f99c487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2260.526047] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2260.526193] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2260.623048] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99b65a9-1605-48ee-995e-99e48cbf2689 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.630202] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b7ed9a-15c5-4709-b6ab-6cd51fdc29dc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.658536] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c28f79-de72-4146-a4fc-f061e5da6247 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.664906] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb952a2d-f8ad-4cca-b3d3-d3b6a6a8602a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2260.677339] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2261.180960] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2261.363389] env[69227]: WARNING oslo_vmware.rw_handles [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without 
response [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2261.363389] env[69227]: ERROR oslo_vmware.rw_handles [ 2261.363958] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2261.366158] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2261.366412] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Copying Virtual Disk [datastore2] vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/0f01eed7-4010-41be-ad1a-952e0c1b39cc/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2261.366702] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95690a76-aec1-40b2-8c07-dc62f57f8bf3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.376323] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for the task: (returnval){ [ 2261.376323] env[69227]: value = "task-3475220" [ 2261.376323] env[69227]: _type = "Task" [ 2261.376323] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.384290] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Task: {'id': task-3475220, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.686445] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2261.686752] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.192s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2261.886862] env[69227]: DEBUG oslo_vmware.exceptions [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2261.887248] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2261.887809] env[69227]: ERROR nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2261.887809] env[69227]: Faults: ['InvalidArgument'] [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Traceback (most recent call last): [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] yield resources [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self.driver.spawn(context, instance, image_meta, [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: 
c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._fetch_image_if_missing(context, vi) [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] image_cache(vi, tmp_image_ds_loc) [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] vm_util.copy_virtual_disk( [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] session._wait_for_task(vmdk_copy_task) [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.wait_for_task(task_ref) [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return evt.wait() [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] result = hub.switch() [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.greenlet.switch() [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self.f(*self.args, **self.kw) [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] raise exceptions.translate_fault(task_info.error) [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Faults: 
['InvalidArgument'] [ 2261.887809] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] [ 2261.888828] env[69227]: INFO nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Terminating instance [ 2261.889574] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2261.889779] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2261.890039] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6eb28610-51d0-47ec-a4aa-638daebe72fc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.891995] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2261.892169] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquired lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2261.892335] env[69227]: DEBUG nova.network.neutron [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2261.898712] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2261.898880] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2261.900052] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24080b2d-4f54-4149-a812-be59cfe8fa68 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.905515] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 2261.905515] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]520bd69d-a65a-7753-baeb-04a368d981cc" [ 2261.905515] env[69227]: _type = "Task" [ 2261.905515] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.913150] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]520bd69d-a65a-7753-baeb-04a368d981cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.410357] env[69227]: DEBUG nova.network.neutron [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2262.417970] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2262.418273] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating directory with path [datastore2] vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2262.418487] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8b82277-219f-4c26-a724-0fff7e3143ae {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.437988] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Created directory with path [datastore2] vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2262.438203] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Fetch image to [datastore2] vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2262.438383] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2262.439159] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ded8291-3cc6-47ee-a1ac-8cd8f86bf2a0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.445754] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a262010d-ded9-4a50-a00f-5a51edf47157 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.454484] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6fbe68-3ca6-4738-8fbf-93a0e687bde7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.458413] env[69227]: DEBUG nova.network.neutron [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2262.488826] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d620135-4a3a-43e9-b842-0dc9c3bc3dd3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.494997] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-318e3e64-7f8d-4ceb-9ed6-6ee48ee7d77e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.517153] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2262.668981] env[69227]: DEBUG oslo_vmware.rw_handles [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2262.729709] env[69227]: DEBUG oslo_vmware.rw_handles [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2262.730022] env[69227]: DEBUG oslo_vmware.rw_handles [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2262.961525] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Releasing lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2262.961953] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2262.962170] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2262.963100] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9ac8dc-2cf0-4c3a-918b-24bf188167ec {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.970813] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2262.971045] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd8ca028-e726-4496-9dbd-2021cb7e4c2a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.996678] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2262.996875] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: 
c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2262.997073] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Deleting the datastore file [datastore2] c6bc17ce-e672-402d-b00b-e6cd2db09fd6 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2262.997309] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85b0e2df-63ca-4cac-b446-3a2dbbcd210c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.003200] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for the task: (returnval){ [ 2263.003200] env[69227]: value = "task-3475222" [ 2263.003200] env[69227]: _type = "Task" [ 2263.003200] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2263.010455] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Task: {'id': task-3475222, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2263.512728] env[69227]: DEBUG oslo_vmware.api [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Task: {'id': task-3475222, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031122} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2263.512962] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2263.513146] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2263.513324] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2263.513494] env[69227]: INFO nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Took 0.55 seconds to destroy the instance on the hypervisor. 
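[Editor's note] A minimal illustrative sketch (editor-added, not Nova's own code) of the oslo.vmware call pattern behind the recurring "Invoking FileManager.DeleteDatastoreFile_Task with opID=...", "Waiting for the task", and "progress is 0%" entries in this log: a task-returning VIM method is invoked through the API session, and wait_for_task() polls the task until completion, translating any task fault into an exception; faults with no specific mapping (such as the InvalidArgument/fileType fault above, note the "Fault InvalidArgument not matched" entry) surface as VimFaultException. The host, credentials, retry/poll values and datastore path below are placeholders.

    from oslo_vmware import api
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    # Hypothetical session; positional order is (host, username, password,
    # api_retry_count, task_poll_interval).
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret', 10, 0.5)

    # Look up a Datacenter managed-object reference via the property collector
    # (paging via vim_util.continue_retrieval is omitted in this sketch).
    retrieve_result = session.invoke_api(
        vim_util, 'get_objects', session.vim, 'Datacenter', 100)
    dc_ref = retrieve_result.objects[0].obj

    # Invoke a task-returning VIM method, here FileManager.DeleteDatastoreFile_Task
    # as in the "Deleting the datastore file" entries above, and wait for it.
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore2] some-instance-dir',   # placeholder datastore path
        datacenter=dc_ref)
    try:
        task_info = session.wait_for_task(task)  # polls, like the _poll_task lines
        print('task finished with state: %s' % task_info.state)
    except vexc.VimFaultException as err:
        # Matches the traceback above: translate_fault() raised from _poll_task.
        print('task failed: %s (faults: %s)' % (err, err.fault_list))

In the log itself these waits run inside oslo.service looping calls, which is why the "Waiting for function ... to return" and periodic task-progress entries appear around each task.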
[ 2263.513730] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2263.513945] env[69227]: DEBUG nova.compute.manager [-] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network deallocation for instance since networking was not requested. {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2263.515991] env[69227]: DEBUG nova.compute.claims [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2263.516175] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2263.516405] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2264.128582] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ed27da-76fe-4fb0-801c-fa5b02a99deb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.136186] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4000a8d8-e05f-48c9-ad77-1123f60ce8fa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.165540] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c32fc7-8d69-4b5c-8f21-1d3923aa420b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.172115] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43a36d6-21fe-4588-8d30-a99cc1c95d1b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.186055] env[69227]: DEBUG nova.compute.provider_tree [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2264.689226] env[69227]: DEBUG nova.scheduler.client.report [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 
tempest-ServersAaction247Test-570855428-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2265.193735] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.677s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2265.194363] env[69227]: ERROR nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2265.194363] env[69227]: Faults: ['InvalidArgument'] [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Traceback (most recent call last): [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self.driver.spawn(context, instance, image_meta, [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._fetch_image_if_missing(context, vi) [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] image_cache(vi, tmp_image_ds_loc) [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] vm_util.copy_virtual_disk( [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] 
session._wait_for_task(vmdk_copy_task) [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.wait_for_task(task_ref) [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return evt.wait() [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] result = hub.switch() [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.greenlet.switch() [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self.f(*self.args, **self.kw) [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] raise exceptions.translate_fault(task_info.error) [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Faults: ['InvalidArgument'] [ 2265.194363] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] [ 2265.195265] env[69227]: DEBUG nova.compute.utils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2265.196740] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Build of instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 was re-scheduled: A specified parameter was not correct: fileType [ 2265.196740] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2265.197140] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: 
c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2265.197360] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2265.197505] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquired lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2265.197661] env[69227]: DEBUG nova.network.neutron [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2265.716557] env[69227]: DEBUG nova.network.neutron [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2265.789677] env[69227]: DEBUG nova.network.neutron [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2266.292732] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Releasing lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2266.293144] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2266.293211] env[69227]: DEBUG nova.compute.manager [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Skipping network deallocation for instance since networking was not requested. 
{{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2285}} [ 2267.322644] env[69227]: INFO nova.scheduler.client.report [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Deleted allocations for instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 [ 2267.830119] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2c2a4e21-4af8-4928-bff2-3ba46ce79ea9 tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 611.395s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2267.830407] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 416.046s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2267.830639] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2267.830864] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2267.831047] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2267.833847] env[69227]: INFO nova.compute.manager [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Terminating instance [ 2267.835409] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquiring lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2267.835566] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Acquired lock 
"refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2267.835729] env[69227]: DEBUG nova.network.neutron [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2268.352763] env[69227]: DEBUG nova.network.neutron [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2268.396220] env[69227]: DEBUG nova.network.neutron [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2268.899207] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Releasing lock "refresh_cache-c6bc17ce-e672-402d-b00b-e6cd2db09fd6" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2268.899739] env[69227]: DEBUG nova.compute.manager [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2268.899964] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2268.900295] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41994585-7a32-4b37-adb0-3ecb3c9dd076 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.909089] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-370925f3-cb86-42e2-9e22-458fb874791a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.937135] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c6bc17ce-e672-402d-b00b-e6cd2db09fd6 could not be found. 
[ 2268.937317] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2268.937526] env[69227]: INFO nova.compute.manager [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2268.937795] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2268.938024] env[69227]: DEBUG nova.compute.manager [-] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2268.938119] env[69227]: DEBUG nova.network.neutron [-] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2269.037460] env[69227]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=69227) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2269.037716] env[69227]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [-] Dynamic interval looping call 'oslo_service.backend.eventlet.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall exception_handler_v20(status_code, error_body) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise client_exc(message=error_message, [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Neutron server returns request_ids: ['req-c4b47f86-94dc-4183-ba28-81ce3986f128'] [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall During handling of the above exception, another exception occurred: [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall Traceback (most recent call last): [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = func(*self.args, **self.kw) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall result = f(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._deallocate_network( [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self.network_api.deallocate_for_instance( [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall data = neutron.list_ports(**search_opts) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall for r in self._pagination(collection, path, **params): [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall res = self.get(path, params=params) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.retry_request("GET", action, body=body, [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall return self.do_request(method, action, body=body, [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall ret = obj(*args, **kwargs) [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2269.038246] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2269.039602] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2269.039602] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2269.039602] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2269.039602] env[69227]: ERROR oslo.service.backend.eventlet.loopingcall [ 2269.039602] env[69227]: ERROR nova.compute.manager [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2269.543414] env[69227]: ERROR nova.compute.manager [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Traceback (most recent call last): [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] ret = obj(*args, **kwargs) [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] exception_handler_v20(status_code, error_body) [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] raise client_exc(message=error_message, [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Neutron server returns request_ids: ['req-c4b47f86-94dc-4183-ba28-81ce3986f128'] [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] During handling of the above exception, another exception occurred: [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Traceback (most recent call last): [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._delete_instance(context, instance, bdms) [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File 
"/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._shutdown_instance(context, instance, bdms) [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._try_deallocate_network(context, instance, requested_networks) [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] with excutils.save_and_reraise_exception(): [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self.force_reraise() [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] raise self.value [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] _deallocate_network_with_retries() [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return evt.wait() [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] result = hub.switch() [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.greenlet.switch() [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] result = func(*self.args, **self.kw) [ 2269.543414] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 2269.544885] env[69227]: ERROR nova.compute.manager 
[instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] result = f(*args, **kwargs) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._deallocate_network( [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self.network_api.deallocate_for_instance( [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] data = neutron.list_ports(**search_opts) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] ret = obj(*args, **kwargs) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.list('ports', self.ports_path, retrieve_all, [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] ret = obj(*args, **kwargs) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] for r in self._pagination(collection, path, **params): [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] res = self.get(path, params=params) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] ret = obj(*args, **kwargs) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.retry_request("GET", action, body=body, [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File 
"/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] ret = obj(*args, **kwargs) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] return self.do_request(method, action, body=body, [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] ret = obj(*args, **kwargs) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] self._handle_fault_response(status_code, replybody, resp) [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2269.544885] env[69227]: ERROR nova.compute.manager [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] [ 2270.047838] env[69227]: DEBUG oslo_concurrency.lockutils [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Lock "c6bc17ce-e672-402d-b00b-e6cd2db09fd6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.217s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2271.558100] env[69227]: INFO nova.compute.manager [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] [instance: c6bc17ce-e672-402d-b00b-e6cd2db09fd6] Successfully reverted task state from None on failure for instance. [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server [None req-fc57f040-02d3-46b3-843a-36aa44b8474a tempest-ServersAaction247Test-570855428 tempest-ServersAaction247Test-570855428-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-c4b47f86-94dc-4183-ba28-81ce3986f128'] [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3353, in terminate_instance [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3348, in do_terminate_instance [ 2271.561228] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3341, in do_terminate_instance [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3276, in _delete_instance [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3170, in _shutdown_instance [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3084, in _try_deallocate_network [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server raise self.value [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3082, in _try_deallocate_network [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 437, in func [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 151, in _run_loop [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py", line 408, in _func [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3071, in _deallocate_network_with_retries [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2291, in _deallocate_network [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in 
wrapper [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2271.562798] env[69227]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2271.564723] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2271.564723] env[69227]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2271.564723] env[69227]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2271.564723] env[69227]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2271.564723] env[69227]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 2271.564723] env[69227]: ERROR oslo_messaging.rpc.server [ 2275.683646] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.428346] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2310.245979] env[69227]: WARNING oslo_vmware.rw_handles [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2310.245979] env[69227]: ERROR oslo_vmware.rw_handles [ 2310.246664] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2310.248763] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2310.249068] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Copying Virtual Disk [datastore2] vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/7c293f34-d9fd-4ebf-8b1a-3b656bb2ac82/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) 
copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2310.249399] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33d5f720-f327-4302-bc12-c94b9da6bcdf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.258250] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 2310.258250] env[69227]: value = "task-3475223" [ 2310.258250] env[69227]: _type = "Task" [ 2310.258250] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.265882] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': task-3475223, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.768287] env[69227]: DEBUG oslo_vmware.exceptions [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2310.768606] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2310.769245] env[69227]: ERROR nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2310.769245] env[69227]: Faults: ['InvalidArgument'] [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Traceback (most recent call last): [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] yield resources [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self.driver.spawn(context, instance, image_meta, [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2310.769245] env[69227]: ERROR 
nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self._fetch_image_if_missing(context, vi) [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] image_cache(vi, tmp_image_ds_loc) [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] vm_util.copy_virtual_disk( [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] session._wait_for_task(vmdk_copy_task) [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] return self.wait_for_task(task_ref) [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] return evt.wait() [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] result = hub.switch() [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] return self.greenlet.switch() [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self.f(*self.args, **self.kw) [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] raise exceptions.translate_fault(task_info.error) [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: 
cc0035fc-3edc-457b-a798-afa4f9ea7071] Faults: ['InvalidArgument'] [ 2310.769245] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] [ 2310.770150] env[69227]: INFO nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Terminating instance [ 2310.771150] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2310.771357] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2310.771647] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a98ac9eb-009a-444b-98b5-9ef6c88a5ab8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.774049] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2310.774243] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2310.774968] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b885a498-d9eb-4ca8-ac13-da6153002ae7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.781523] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2310.781723] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cad654d7-b2b2-43e1-96d5-7cd41c30fc6d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.783791] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2310.783965] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2310.784913] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa494193-86cd-4c2f-9f11-ba933e74d6fb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.789672] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Waiting for the task: (returnval){ [ 2310.789672] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5267862b-f011-c84c-9a24-8968eb8b3c60" [ 2310.789672] env[69227]: _type = "Task" [ 2310.789672] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.801460] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5267862b-f011-c84c-9a24-8968eb8b3c60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.856549] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2310.856771] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2310.856955] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleting the datastore file [datastore2] cc0035fc-3edc-457b-a798-afa4f9ea7071 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2310.857236] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c80801da-3505-4cfa-a9ac-9e1a08759357 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.863824] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for the task: (returnval){ [ 2310.863824] env[69227]: value = "task-3475225" [ 2310.863824] env[69227]: _type = "Task" [ 2310.863824] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.872367] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': task-3475225, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.300339] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2311.300591] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Creating directory with path [datastore2] vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2311.300823] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c49caa8-1010-47d0-8019-05da7ca00076 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.372654] env[69227]: DEBUG oslo_vmware.api [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Task: {'id': task-3475225, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075597} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.372876] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2311.373068] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2311.373241] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2311.373410] env[69227]: INFO nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Took 0.60 seconds to destroy the instance on the hypervisor. 
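
The surrounding entries show the usual oslo.vmware task lifecycle: the driver invokes a vCenter task (CopyVirtualDisk_Task, then DeleteDatastoreFile_Task during cleanup), wait_for_task polls it ("progress is 0%" ... "completed successfully"), and a task that finishes in the error state is re-raised via exceptions.translate_fault — which is where the "A specified parameter was not correct: fileType" fault in the earlier traceback surfaces. Below is a minimal sketch of that polling loop, written against an assumed get_task_info callable rather than oslo_vmware's real session API:

    # Simplified stand-in for the wait_for_task/_poll_task flow visible in
    # the log; get_task_info and TaskFailed are assumptions made for the
    # sketch, not part of the library.
    import time


    class TaskFailed(Exception):
        """Raised when the vCenter task ends in the 'error' state."""


    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll a vCenter task until it succeeds, raising on failure.

        get_task_info() should return an object exposing .state, .progress
        and .error, e.g. the TaskInfo read back through the property
        collector as in the entries above.
        """
        while True:
            info = get_task_info()
            if info.state == 'success':
                return info
            if info.state == 'error':
                # Mirrors "raise exceptions.translate_fault(task_info.error)"
                # in the traceback: the server-side fault (InvalidArgument on
                # fileType here) is surfaced to the caller.
                raise TaskFailed(getattr(info.error, 'localizedMessage',
                                         str(info.error)))
            # 'queued'/'running': keep polling.
            time.sleep(poll_interval)

oslo.vmware drives the equivalent loop through a looping call on the API session rather than a bare sleep, as the loopingcall frame in the traceback indicates; the control flow recorded in the log is the same.
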
[ 2311.375600] env[69227]: DEBUG nova.compute.claims [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2311.375770] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2311.376032] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2311.403706] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Created directory with path [datastore2] vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2311.403925] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Fetch image to [datastore2] vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2311.404116] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2311.404928] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278a03be-0a3f-41ce-985d-70342224df63 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.412330] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668968a7-c9d9-4ff1-8edd-bdce7e55072a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.421894] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d493c7c8-be18-4504-91aa-968cad9ff0d8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.426272] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] 
Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2311.452936] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d72526-5cc7-4f99-8cd1-2a04b335ba04 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.458171] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7d33e290-8dd5-49df-91e7-089ee3806b3f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.478071] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2311.595346] env[69227]: DEBUG oslo_vmware.rw_handles [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2311.655925] env[69227]: DEBUG oslo_vmware.rw_handles [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2311.656134] env[69227]: DEBUG oslo_vmware.rw_handles [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2311.983362] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f6f344-7cb0-4594-9f65-5a5b8e8a2438 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.990958] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac37a47-e401-471d-81d2-1e4e082dc4f4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.021914] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1addc3-1eab-42c7-9122-e4c6592a355d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.028751] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b4e06a-efca-4496-a2fe-8febc4c565da {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.041439] env[69227]: DEBUG nova.compute.provider_tree [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2312.544850] env[69227]: DEBUG nova.scheduler.client.report [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2313.052646] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.676s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2313.053231] env[69227]: ERROR nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2313.053231] env[69227]: Faults: ['InvalidArgument'] [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Traceback (most recent call last): [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2313.053231] env[69227]: ERROR 
nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self.driver.spawn(context, instance, image_meta, [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self._fetch_image_if_missing(context, vi) [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] image_cache(vi, tmp_image_ds_loc) [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] vm_util.copy_virtual_disk( [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] session._wait_for_task(vmdk_copy_task) [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] return self.wait_for_task(task_ref) [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] return evt.wait() [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] result = hub.switch() [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] return self.greenlet.switch() [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] self.f(*self.args, **self.kw) [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] raise exceptions.translate_fault(task_info.error) [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Faults: ['InvalidArgument'] [ 2313.053231] env[69227]: ERROR nova.compute.manager [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] [ 2313.054070] env[69227]: DEBUG nova.compute.utils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2313.055733] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Build of instance cc0035fc-3edc-457b-a798-afa4f9ea7071 was re-scheduled: A specified parameter was not correct: fileType [ 2313.055733] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2313.056115] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2313.056290] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2313.056459] env[69227]: DEBUG nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2313.056639] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2313.426608] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2313.426784] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2313.797045] env[69227]: DEBUG nova.network.neutron [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2314.300160] env[69227]: INFO nova.compute.manager [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Took 1.24 seconds to deallocate network for instance. [ 2315.329444] env[69227]: INFO nova.scheduler.client.report [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Deleted allocations for instance cc0035fc-3edc-457b-a798-afa4f9ea7071 [ 2315.836782] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f6927cd3-6971-4da0-9bbd-40385f0732f9 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 624.535s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2315.837081] env[69227]: DEBUG oslo_concurrency.lockutils [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 428.786s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2315.837332] env[69227]: DEBUG oslo_concurrency.lockutils [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Acquiring lock "cc0035fc-3edc-457b-a798-afa4f9ea7071-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2315.837550] env[69227]: DEBUG oslo_concurrency.lockutils [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2315.837720] env[69227]: DEBUG oslo_concurrency.lockutils [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2315.839891] env[69227]: INFO nova.compute.manager [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 
tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Terminating instance [ 2315.841637] env[69227]: DEBUG nova.compute.manager [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2315.841836] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2315.842114] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-36693324-a617-4577-9d60-1af1bc4a214c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.851841] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cada5b6b-9314-49fe-b0ff-846bf3664ed4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2315.878093] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cc0035fc-3edc-457b-a798-afa4f9ea7071 could not be found. [ 2315.878283] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2315.878460] env[69227]: INFO nova.compute.manager [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2315.878692] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2315.878909] env[69227]: DEBUG nova.compute.manager [-] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2315.879013] env[69227]: DEBUG nova.network.neutron [-] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2316.397991] env[69227]: DEBUG nova.network.neutron [-] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2316.427412] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2316.902097] env[69227]: INFO nova.compute.manager [-] [instance: cc0035fc-3edc-457b-a798-afa4f9ea7071] Took 1.02 seconds to deallocate network for instance. [ 2317.426786] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2317.927139] env[69227]: DEBUG oslo_concurrency.lockutils [None req-faa2e3cd-61b5-4355-997d-f5ddc9603cd7 tempest-DeleteServersTestJSON-155067658 tempest-DeleteServersTestJSON-155067658-project-member] Lock "cc0035fc-3edc-457b-a798-afa4f9ea7071" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.090s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2319.427804] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.428107] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2319.931150] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2319.931427] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2319.931702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s 
{{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2319.931875] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2319.932742] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217a83e7-966a-4481-b406-83798eb5360a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.941413] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6971673d-20da-47b6-a86c-ee2936718185 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.955541] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fb9bfe-4a7f-4d44-b037-f3dd608b7da8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.961569] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd3c83f-dcba-4449-aa2e-e6779b41798e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.989710] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180976MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2319.989890] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2319.990168] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2321.018162] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.018383] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.018460] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.018580] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.018694] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.018811] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.018926] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 247d620f-a6ed-4b40-9cc5-269c6f99c487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2321.019188] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2321.019340] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2321.177767] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1710b440-9f3b-44bd-923f-cfa6aefeb5d9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.185251] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d0acec3-4a46-4ca6-946c-65ea2b75c855 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.214114] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11953ae3-78f3-4fa5-93a3-1203bd164c9a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.220545] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a8fc55-44c2-47b6-b638-a6a0fadce83d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2321.234235] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2321.737785] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2322.242488] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2322.242797] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.253s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2323.237576] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2323.237802] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2323.237949] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2323.238084] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2323.742070] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742464] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742464] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742557] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742601] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742722] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742842] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2323.742959] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2339.800643] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f1aaad3d-a16c-4262-acf5-aa248dd9deef tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2359.735879] env[69227]: WARNING oslo_vmware.rw_handles [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2359.735879] env[69227]: ERROR oslo_vmware.rw_handles [ 2359.736667] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2359.738609] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2359.738899] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Copying Virtual Disk [datastore2] vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/106f412c-9802-4d35-83b4-e566c282212d/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2359.739242] env[69227]: DEBUG 
oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-278cf268-a633-408f-91c2-049d4137e52d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2359.746722] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Waiting for the task: (returnval){ [ 2359.746722] env[69227]: value = "task-3475226" [ 2359.746722] env[69227]: _type = "Task" [ 2359.746722] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2359.755929] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Task: {'id': task-3475226, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.257017] env[69227]: DEBUG oslo_vmware.exceptions [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2360.257308] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2360.257865] env[69227]: ERROR nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2360.257865] env[69227]: Faults: ['InvalidArgument'] [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Traceback (most recent call last): [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] yield resources [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self.driver.spawn(context, instance, image_meta, [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self._fetch_image_if_missing(context, vi) [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] image_cache(vi, tmp_image_ds_loc) [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] vm_util.copy_virtual_disk( [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] session._wait_for_task(vmdk_copy_task) [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] return self.wait_for_task(task_ref) [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] return evt.wait() [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] result = hub.switch() [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] return self.greenlet.switch() [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self.f(*self.args, **self.kw) [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] raise exceptions.translate_fault(task_info.error) [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2360.257865] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Faults: ['InvalidArgument'] [ 2360.257865] 
env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] [ 2360.258940] env[69227]: INFO nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Terminating instance [ 2360.259666] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2360.260936] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2360.261617] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2360.261811] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2360.262059] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64795c26-b607-4f5c-a99a-5eaa4398453d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.264434] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93771934-4034-4f09-9c8b-073443a081e4 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.271924] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2360.272935] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-40c9ca4b-00cf-4b4e-b321-ab654665a6b7 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.274314] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2360.274485] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 
tempest-AttachVolumeTestJSON-998289800-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2360.275154] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20333f64-3cc0-4827-8413-a10b67822871 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.281156] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for the task: (returnval){ [ 2360.281156] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5226cde4-7943-7583-8983-084dcd5351ef" [ 2360.281156] env[69227]: _type = "Task" [ 2360.281156] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.289181] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]5226cde4-7943-7583-8983-084dcd5351ef, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.354436] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2360.354601] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2360.354787] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Deleting the datastore file [datastore2] 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2360.355109] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-18afbbe5-92ed-4ca0-be0b-e8f5fd338086 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.363449] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Waiting for the task: (returnval){ [ 2360.363449] env[69227]: value = "task-3475228" [ 2360.363449] env[69227]: _type = "Task" [ 2360.363449] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2360.371140] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Task: {'id': task-3475228, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2360.791913] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2360.792320] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Creating directory with path [datastore2] vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2360.792477] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ede5edb-5cab-4622-974c-3c02cf3a8d0a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.803809] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Created directory with path [datastore2] vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2360.803987] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Fetch image to [datastore2] vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2360.804198] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2360.804902] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43b6f91-9f60-456c-8e17-683211eb6a16 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.810960] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5407ba3a-5061-425d-9961-23a95efb778f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.819560] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6103a1c-71b8-4a20-a438-01c0ac1921e5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.849642] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361f3d3d-db43-4ec9-8634-06ee241fa85b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.854668] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-82214f68-3f24-48f6-9ea5-d0c07b3fa961 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2360.871722] env[69227]: DEBUG oslo_vmware.api [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Task: {'id': task-3475228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073816} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2360.872033] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2360.872260] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2360.872465] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2360.872689] env[69227]: INFO nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 2360.875535] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2360.877461] env[69227]: DEBUG nova.compute.claims [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2360.877629] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2360.877873] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2360.932031] env[69227]: DEBUG oslo_vmware.rw_handles [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2360.992184] env[69227]: DEBUG oslo_vmware.rw_handles [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2360.992410] env[69227]: DEBUG oslo_vmware.rw_handles [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2361.467057] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b11834-2a13-4ea5-8dd2-f6f699b0bd58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.474214] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b7180f-95b0-4ee1-a27d-cfc96d53c2ba {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.504673] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e40b3f4-b70b-4273-8193-806a710dfc8b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.511360] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede0986e-552b-4608-92e0-7e6096fc604a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2361.523922] env[69227]: DEBUG nova.compute.provider_tree [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2362.027181] env[69227]: DEBUG nova.scheduler.client.report [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2362.532487] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.654s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2362.533017] env[69227]: ERROR nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.533017] env[69227]: Faults: ['InvalidArgument'] [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Traceback (most recent call last): [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2362.533017] 
env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self.driver.spawn(context, instance, image_meta, [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self._fetch_image_if_missing(context, vi) [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] image_cache(vi, tmp_image_ds_loc) [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] vm_util.copy_virtual_disk( [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] session._wait_for_task(vmdk_copy_task) [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] return self.wait_for_task(task_ref) [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] return evt.wait() [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] result = hub.switch() [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] return self.greenlet.switch() [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] self.f(*self.args, **self.kw) [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] raise exceptions.translate_fault(task_info.error) [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Faults: ['InvalidArgument'] [ 2362.533017] env[69227]: ERROR nova.compute.manager [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] [ 2362.533848] env[69227]: DEBUG nova.compute.utils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2362.535411] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Build of instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 was re-scheduled: A specified parameter was not correct: fileType [ 2362.535411] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2362.535787] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2362.535956] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2362.536144] env[69227]: DEBUG nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2362.536346] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2363.267398] env[69227]: DEBUG nova.network.neutron [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2363.771275] env[69227]: INFO nova.compute.manager [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Took 1.23 seconds to deallocate network for instance. [ 2364.805620] env[69227]: INFO nova.scheduler.client.report [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Deleted allocations for instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 [ 2365.312930] env[69227]: DEBUG oslo_concurrency.lockutils [None req-5dccac9c-ce37-4e5b-8bc3-a389b20e3760 tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 671.386s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2365.313227] env[69227]: DEBUG oslo_concurrency.lockutils [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 475.147s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2365.313491] env[69227]: DEBUG oslo_concurrency.lockutils [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Acquiring lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2365.313670] env[69227]: DEBUG oslo_concurrency.lockutils [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2365.313858] env[69227]: DEBUG oslo_concurrency.lockutils [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2365.316744] env[69227]: INFO nova.compute.manager [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Terminating instance [ 2365.318426] env[69227]: DEBUG nova.compute.manager [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2365.318624] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2365.318885] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f697fcaf-de83-4565-9234-79d742351124 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.329118] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d07953b-06b3-47ce-9124-6a45cc575b55 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.355119] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8ed695cd-8c17-43e0-ba42-081f2aecd8c2 could not be found. [ 2365.355323] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2365.355501] env[69227]: INFO nova.compute.manager [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2365.355734] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2365.355948] env[69227]: DEBUG nova.compute.manager [-] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2365.356064] env[69227]: DEBUG nova.network.neutron [-] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2365.872560] env[69227]: DEBUG nova.network.neutron [-] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2366.376105] env[69227]: INFO nova.compute.manager [-] [instance: 8ed695cd-8c17-43e0-ba42-081f2aecd8c2] Took 1.02 seconds to deallocate network for instance. [ 2367.399793] env[69227]: DEBUG oslo_concurrency.lockutils [None req-638cb40a-1471-479d-9f37-7491be3a547a tempest-ServersNegativeTestJSON-1235680419 tempest-ServersNegativeTestJSON-1235680419-project-member] Lock "8ed695cd-8c17-43e0-ba42-081f2aecd8c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.086s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2368.427846] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.607603] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2370.608060] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Getting list of instances from cluster (obj){ [ 2370.608060] env[69227]: value = "domain-c8" [ 2370.608060] env[69227]: _type = "ClusterComputeResource" [ 2370.608060] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2370.609159] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fb5ba47-d1c3-4c3b-af68-2c45811e3e40 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2370.623631] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Got total of 6 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2373.946494] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.427209] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.427575] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2377.427773] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2379.427652] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.422566] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.426235] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.426573] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2380.930133] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2380.930536] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2380.930708] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2380.930900] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2380.931807] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d2835a-88f8-4940-a028-b78bb9996839 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.941011] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794c9784-75af-4bf5-9606-d99d379cca8b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.956287] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64203dda-343f-4e5b-a1bf-6e0160d262e2 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.962693] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb3f384-b9f6-451e-8aa5-14248637f01f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.990810] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180937MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2380.990979] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2380.991189] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2382.062202] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 5539b326-2f24-45b7-874a-edc484e82267 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.062421] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.062555] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.062677] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.062794] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.062911] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 247d620f-a6ed-4b40-9cc5-269c6f99c487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2382.063115] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2382.063257] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2382.079335] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing inventories for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2382.091707] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating ProviderTree inventory for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2382.091888] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Updating inventory in ProviderTree for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2382.102157] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing aggregate associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, aggregates: None {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2382.121126] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Refreshing trait associations for resource provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=69227) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2382.194434] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-218c819b-1706-4564-93f7-fdca0a1bfeb8 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.201613] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6981e5bc-4d70-46b0-9f40-3d5db24c7849 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.231885] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2935381b-8dba-44f3-bff6-bb0c5aafcb7a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.238717] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-333d4df6-f7af-4b46-b0cb-18758f29b58e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.251397] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2382.754404] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2383.258990] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2383.259276] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.268s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
2383.259480] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2383.259663] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11243}} [ 2383.762328] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] There are 0 instances to clean {{(pid=69227) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11252}} [ 2384.764028] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.764443] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2384.764443] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2385.267903] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2385.268116] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2385.268208] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2385.268336] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2385.268458] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2385.268577] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Skipping network cache update for instance because it is Building. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2385.268697] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2392.094122] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_power_states {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2392.598470] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Getting list of instances from cluster (obj){ [ 2392.598470] env[69227]: value = "domain-c8" [ 2392.598470] env[69227]: _type = "ClusterComputeResource" [ 2392.598470] env[69227]: } {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2392.599516] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17f9062a-f244-41ce-8bb2-31be3d9457b0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2392.613066] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Got total of 6 instances {{(pid=69227) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 2392.613238] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 5539b326-2f24-45b7-874a-edc484e82267 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 2392.613436] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid f0394b5e-1437-4e73-9177-0d3f9b1a16ae {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 2392.613616] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 84d5494a-c08b-45be-a35a-860e64fdf76f {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 2392.613794] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 0b31dc0b-6a70-41aa-adbe-d989a002adca {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 2392.613945] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 2392.614108] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Triggering sync for uuid 247d620f-a6ed-4b40-9cc5-269c6f99c487 {{(pid=69227) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10353}} [ 2392.614414] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "5539b326-2f24-45b7-874a-edc484e82267" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2392.614695] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring 
lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2392.614901] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "84d5494a-c08b-45be-a35a-860e64fdf76f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2392.615117] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "0b31dc0b-6a70-41aa-adbe-d989a002adca" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2392.615312] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "2d191d85-97d5-4b5e-9e1b-1fdd68754e4b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2392.615513] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "247d620f-a6ed-4b40-9cc5-269c6f99c487" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2394.427450] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.427853] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Cleaning up deleted instances with incomplete migration {{(pid=69227) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11281}} [ 2395.427348] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2395.932593] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2409.750114] env[69227]: WARNING oslo_vmware.rw_handles [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2409.750114] 
env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2409.750114] env[69227]: ERROR oslo_vmware.rw_handles [ 2409.750852] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2409.752603] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2409.752868] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Copying Virtual Disk [datastore2] vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/9024d675-1b85-452b-8608-2052d77cd1df/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2409.753168] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6deafb9-acbb-4919-9e59-2d3471d1720e {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2409.762010] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for the task: (returnval){ [ 2409.762010] env[69227]: value = "task-3475229" [ 2409.762010] env[69227]: _type = "Task" [ 2409.762010] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2409.769958] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Task: {'id': task-3475229, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.273271] env[69227]: DEBUG oslo_vmware.exceptions [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2410.273547] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2410.274124] env[69227]: ERROR nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2410.274124] env[69227]: Faults: ['InvalidArgument'] [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] Traceback (most recent call last): [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] yield resources [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self.driver.spawn(context, instance, image_meta, [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self._fetch_image_if_missing(context, vi) [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] image_cache(vi, tmp_image_ds_loc) [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] vm_util.copy_virtual_disk( [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", 
line 1423, in copy_virtual_disk [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] session._wait_for_task(vmdk_copy_task) [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] return self.wait_for_task(task_ref) [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] return evt.wait() [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] result = hub.switch() [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] return self.greenlet.switch() [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self.f(*self.args, **self.kw) [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] raise exceptions.translate_fault(task_info.error) [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] Faults: ['InvalidArgument'] [ 2410.274124] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] [ 2410.275118] env[69227]: INFO nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Terminating instance [ 2410.275981] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2410.276194] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Creating directory 
with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2410.276433] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fa0862d-d985-4874-a561-174e20053a58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.278518] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2410.278720] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2410.279442] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fb63b0-7c10-4d95-987d-d86f2974ba87 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.286740] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2410.287677] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aa90dbd9-2e0a-400d-94f9-bb2250e13d7d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.288987] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2410.289181] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2410.289829] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b97da2d1-4bde-4ad6-bd23-e1b99a88ce95 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.294751] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Waiting for the task: (returnval){ [ 2410.294751] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52744868-ff84-4bd8-7358-96ccf7d1c4f5" [ 2410.294751] env[69227]: _type = "Task" [ 2410.294751] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.301563] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52744868-ff84-4bd8-7358-96ccf7d1c4f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.362064] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2410.362293] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2410.362467] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Deleting the datastore file [datastore2] 5539b326-2f24-45b7-874a-edc484e82267 {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2410.362845] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4abebef1-039d-45f4-9191-b545296cb2ee {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.370138] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for the task: (returnval){ [ 2410.370138] env[69227]: value = "task-3475231" [ 2410.370138] env[69227]: _type = "Task" [ 2410.370138] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2410.378062] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Task: {'id': task-3475231, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2410.805150] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2410.805500] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Creating directory with path [datastore2] vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2410.805616] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f54b00ba-04c3-49cd-8a31-9e9202e11081 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.816100] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Created directory with path [datastore2] vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2410.816279] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Fetch image to [datastore2] vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2410.816454] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2410.817148] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c59960de-9c4c-4179-89c6-b0d8adcb992a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.823118] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-452c3cfd-45fb-44d3-b7e1-4d110db20437 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.831837] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e3f0072-ead7-4154-a83c-b668dc3881bf {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.862447] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-04fa16c9-e9cf-4258-8369-49d46ab522a0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.867605] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-1fe72992-a854-4dc1-809e-8251e968a16c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2410.877357] env[69227]: DEBUG oslo_vmware.api [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Task: {'id': task-3475231, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073886} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2410.877598] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2410.877776] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2410.877945] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2410.878147] env[69227]: INFO nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Took 0.60 seconds to destroy the instance on the hypervisor. 
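Editor's note: the disk-copy and datastore-delete operations traced above follow the same poll-until-done pattern — the vCenter call returns a Task, and the driver repeatedly reads the task state ("progress is 0%") until it reaches success or error, raising the translated fault in the error case (the InvalidArgument/fileType failure seen earlier). A minimal, self-contained sketch of that pattern follows; get_task_info(), the state names and the 0.5 s interval are illustrative assumptions, not the oslo.vmware wait_for_task()/_poll_task implementation itself.

import time


class TaskFailed(Exception):
    """Terminal error state, analogous to the translated VimFaultException."""


def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300.0):
    """Poll task_ref until it succeeds, fails, or times out.

    get_task_info(task_ref) is assumed (hypothetically) to return an object
    with .state in {'queued', 'running', 'success', 'error'}, .progress and .error.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_ref)
        if info.state == 'success':
            return info                    # caller can derive duration_secs etc.
        if info.state == 'error':
            raise TaskFailed(info.error)   # mirrors raising the translated task fault
        print(f"Task {task_ref}: progress is {info.progress}%")
        if time.monotonic() >= deadline:
            raise TaskFailed(f"timed out waiting for task {task_ref}")
        time.sleep(interval)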
[ 2410.880228] env[69227]: DEBUG nova.compute.claims [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2410.880410] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2410.880664] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2410.886436] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2410.938473] env[69227]: DEBUG oslo_vmware.rw_handles [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2410.999418] env[69227]: DEBUG oslo_vmware.rw_handles [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2410.999601] env[69227]: DEBUG oslo_vmware.rw_handles [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2411.462407] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4876463-9576-4a0a-8357-c82543d90ce3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.469664] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f5192ea-bed7-454f-91d3-42f10f9d7247 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.499158] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c10dbc3-8215-4d1e-839c-094298116837 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.505616] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc773eff-11aa-4d02-96bf-8e1e9afa7a58 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.519064] env[69227]: DEBUG nova.compute.provider_tree [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2412.022638] env[69227]: DEBUG nova.scheduler.client.report [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2412.527721] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.647s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2412.528343] env[69227]: ERROR nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2412.528343] env[69227]: Faults: ['InvalidArgument'] [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] Traceback (most recent call last): [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2412.528343] env[69227]: ERROR nova.compute.manager 
[instance: 5539b326-2f24-45b7-874a-edc484e82267] self.driver.spawn(context, instance, image_meta, [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self._fetch_image_if_missing(context, vi) [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] image_cache(vi, tmp_image_ds_loc) [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] vm_util.copy_virtual_disk( [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] session._wait_for_task(vmdk_copy_task) [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] return self.wait_for_task(task_ref) [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] return evt.wait() [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] result = hub.switch() [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] return self.greenlet.switch() [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] self.f(*self.args, **self.kw) [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] raise exceptions.translate_fault(task_info.error) [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] Faults: ['InvalidArgument'] [ 2412.528343] env[69227]: ERROR nova.compute.manager [instance: 5539b326-2f24-45b7-874a-edc484e82267] [ 2412.529174] env[69227]: DEBUG nova.compute.utils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2412.530765] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Build of instance 5539b326-2f24-45b7-874a-edc484e82267 was re-scheduled: A specified parameter was not correct: fileType [ 2412.530765] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2412.531177] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2412.531351] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2412.531524] env[69227]: DEBUG nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2412.531686] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2413.247877] env[69227]: DEBUG nova.network.neutron [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2413.751475] env[69227]: INFO nova.compute.manager [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Took 1.22 seconds to deallocate network for instance. [ 2414.786811] env[69227]: INFO nova.scheduler.client.report [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Deleted allocations for instance 5539b326-2f24-45b7-874a-edc484e82267 [ 2415.295068] env[69227]: DEBUG oslo_concurrency.lockutils [None req-87de73a9-46d6-4161-8414-1967f5ab4de7 tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "5539b326-2f24-45b7-874a-edc484e82267" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 663.779s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2415.295344] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "5539b326-2f24-45b7-874a-edc484e82267" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 468.124s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2415.295576] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Acquiring lock "5539b326-2f24-45b7-874a-edc484e82267-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2415.295816] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "5539b326-2f24-45b7-874a-edc484e82267-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2415.296095] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "5539b326-2f24-45b7-874a-edc484e82267-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2415.298800] env[69227]: INFO nova.compute.manager [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Terminating instance [ 2415.300541] env[69227]: DEBUG nova.compute.manager [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2415.300737] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2415.300997] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-65055bb8-54da-4c5c-8bc1-762b8d7b77c1 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.309276] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d0f82bc-4d44-466d-a953-35da49ea4d0b {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.334780] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5539b326-2f24-45b7-874a-edc484e82267 could not be found. [ 2415.334980] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2415.335173] env[69227]: INFO nova.compute.manager [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2415.335416] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2415.335633] env[69227]: DEBUG nova.compute.manager [-] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2415.335770] env[69227]: DEBUG nova.network.neutron [-] [instance: 5539b326-2f24-45b7-874a-edc484e82267] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2415.853949] env[69227]: DEBUG nova.network.neutron [-] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2416.357713] env[69227]: INFO nova.compute.manager [-] [instance: 5539b326-2f24-45b7-874a-edc484e82267] Took 1.02 seconds to deallocate network for instance. [ 2417.381852] env[69227]: DEBUG oslo_concurrency.lockutils [None req-2cd4846e-aa58-4ea6-8e6e-76711cca8c0e tempest-AttachVolumeTestJSON-998289800 tempest-AttachVolumeTestJSON-998289800-project-member] Lock "5539b326-2f24-45b7-874a-edc484e82267" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.086s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2417.382690] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "5539b326-2f24-45b7-874a-edc484e82267" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 24.768s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2417.382877] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 5539b326-2f24-45b7-874a-edc484e82267] During sync_power_state the instance has a pending task (deleting). Skip. 
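Editor's note: the terminate path above serializes access with named locks — the build/terminate code takes the instance-UUID lock, then a "<uuid>-events" lock, and the resource tracker takes "compute_resources" — and the log records how long each acquisition waited and how long the lock was held. Below is a stdlib-only sketch of that named-lock pattern with the same waited/held bookkeeping; it illustrates the pattern only and is not the oslo.concurrency lockutils code the service actually uses.

import threading
import time
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)   # one lock per name (instance UUID, "compute_resources", ...)
_registry_guard = threading.Lock()     # protects creation of entries in _locks


@contextmanager
def named_lock(name, owner):
    with _registry_guard:
        lock = _locks[name]
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {time.monotonic() - acquired:.3f}s')


# Usage, mirroring do_terminate_instance taking the per-instance lock:
# with named_lock("5539b326-2f24-45b7-874a-edc484e82267", "do_terminate_instance"):
#     ...destroy the instance, then clear its events under the "<uuid>-events" lock...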
[ 2417.383068] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "5539b326-2f24-45b7-874a-edc484e82267" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2421.068676] env[69227]: DEBUG oslo_concurrency.lockutils [None req-0863fd3a-5370-4a79-9a94-e7c861ceef2e tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "247d620f-a6ed-4b40-9cc5-269c6f99c487" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2429.931316] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2434.427076] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2437.427956] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2437.428336] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2438.428204] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2440.428844] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.422671] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2441.426443] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.427050] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2442.930981] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2442.931228] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2442.931377] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2442.931534] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2442.932437] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad66e7af-1c08-44e9-9b0e-aa5b5a0d33a0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.941121] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0259b3fd-af41-4fe7-9182-e46f9d25657d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.955358] env[69227]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a0a99e-a4e7-4e41-82fc-0ffeabba2f1d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.961520] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acd4264b-795b-4d87-bdea-2f329efc2fa0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2442.990753] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180971MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2442.990893] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2442.991103] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2444.019202] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2444.019556] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2444.019556] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2444.019770] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2444.019770] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 247d620f-a6ed-4b40-9cc5-269c6f99c487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2444.019878] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2444.020028] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2444.090067] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384a476e-5209-4993-957f-f37b82e11496 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.097718] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d05277-0fbf-4e3f-bfb5-ccdc89cd4250 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.127067] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd323a2f-2555-49cb-af8e-4511aec7331c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.134124] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055a213b-5cdb-47eb-b5fd-53e9c266ea29 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2444.147738] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2444.650764] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2445.155991] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2445.156380] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.165s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2446.157033] env[69227]: DEBUG oslo_service.periodic_task [None 
req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2446.157380] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2446.157380] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2446.661418] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2446.661604] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2446.661703] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2446.661829] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2446.661949] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2446.662080] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. 
{{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2456.430427] env[69227]: WARNING oslo_vmware.rw_handles [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2456.430427] env[69227]: ERROR oslo_vmware.rw_handles [ 2456.430427] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2456.431849] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2456.432129] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Copying Virtual Disk [datastore2] vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/b06d9c0a-fe92-48aa-980c-45ae43d81528/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2456.432430] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57146b5b-bb13-40da-9acd-21cc04eb0152 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.440533] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Waiting for the 
task: (returnval){ [ 2456.440533] env[69227]: value = "task-3475232" [ 2456.440533] env[69227]: _type = "Task" [ 2456.440533] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2456.447859] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Task: {'id': task-3475232, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2456.951144] env[69227]: DEBUG oslo_vmware.exceptions [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Fault InvalidArgument not matched. {{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2456.951390] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2456.951997] env[69227]: ERROR nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2456.951997] env[69227]: Faults: ['InvalidArgument'] [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Traceback (most recent call last): [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] yield resources [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self.driver.spawn(context, instance, image_meta, [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self._fetch_image_if_missing(context, vi) [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: 
f0394b5e-1437-4e73-9177-0d3f9b1a16ae] image_cache(vi, tmp_image_ds_loc) [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] vm_util.copy_virtual_disk( [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] session._wait_for_task(vmdk_copy_task) [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] return self.wait_for_task(task_ref) [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] return evt.wait() [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] result = hub.switch() [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] return self.greenlet.switch() [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self.f(*self.args, **self.kw) [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] raise exceptions.translate_fault(task_info.error) [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Faults: ['InvalidArgument'] [ 2456.951997] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] [ 2456.953093] env[69227]: INFO nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Terminating instance [ 2456.953750] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 
tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2456.954273] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2456.954558] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-884b656a-077f-428f-9a1c-75177d53fd53 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.956674] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2456.956862] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2456.957658] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7fb223-f200-4a83-97c4-27a704c944ab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.964548] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2456.964772] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-142b21dd-c174-48e5-8c14-f8e407e504dc {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.966919] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2456.967095] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2456.968088] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48575d92-b5b8-4c6c-9c15-238216d54ac5 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2456.972581] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2456.972581] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52621134-d114-96a1-007b-f32b7bfedd51" [ 2456.972581] env[69227]: _type = "Task" [ 2456.972581] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2456.979567] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]52621134-d114-96a1-007b-f32b7bfedd51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2457.046361] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2457.046611] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2457.046791] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Deleting the datastore file [datastore2] f0394b5e-1437-4e73-9177-0d3f9b1a16ae {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2457.047073] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-61c87208-b747-4061-be12-367a0a318dca {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.052510] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Waiting for the task: (returnval){ [ 2457.052510] env[69227]: value = "task-3475234" [ 2457.052510] env[69227]: _type = "Task" [ 2457.052510] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2457.060887] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Task: {'id': task-3475234, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2457.483160] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2457.483516] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating directory with path [datastore2] vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2457.483642] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae0c3682-8608-437a-878e-1efe3878b4af {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.493955] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Created directory with path [datastore2] vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2457.494150] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Fetch image to [datastore2] vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2457.494323] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2457.495007] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9909f114-0845-4e13-9251-1c031d7f4cd3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.501281] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2c17d4-a36c-4006-adfd-4019988916ef {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.509895] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54bb8b95-05a0-4425-9fb4-cb1c5963bf44 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.540075] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f77264c-562a-45d1-b9ff-ba02fc9f1409 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.544971] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-95fe9eef-3b9e-45e7-9b9f-5913a85b41ab {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2457.559678] env[69227]: DEBUG oslo_vmware.api [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Task: {'id': task-3475234, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063363} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2457.559913] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2457.560106] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2457.560277] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2457.560448] env[69227]: INFO nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Took 0.60 seconds to destroy the instance on the hypervisor. 
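The records above follow the oslo.vmware task pattern that recurs throughout this log: a vCenter call (here FileManager.DeleteDatastoreFile_Task) returns a task reference, and wait_for_task/_poll_task polls it until vCenter reports success or a fault (the same path that raised VimFaultException "A specified parameter was not correct: fileType" earlier). The snippet below is a minimal schematic of that poll loop under stated assumptions, not the oslo.vmware implementation: the TaskInfo dataclass and the fetch_info callable are illustrative stand-ins for the real vim SOAP property reads.

    # Schematic poll-until-done loop, mirroring the "Waiting for the task ...
    # progress is 0% ... completed successfully" records above. Illustrative only;
    # TaskInfo and fetch_info are assumptions, not oslo.vmware APIs.
    import time
    from dataclasses import dataclass
    from typing import Callable, Optional

    @dataclass
    class TaskInfo:
        state: str                  # 'queued' | 'running' | 'success' | 'error'
        progress: int = 0
        result: object = None
        error_msg: Optional[str] = None

    def wait_for_task(fetch_info: Callable[[], TaskInfo], poll_interval: float = 0.5):
        """Poll a vCenter-style task until it succeeds or faults."""
        while True:
            info = fetch_info()
            if info.state == 'success':
                return info.result
            if info.state == 'error':
                # This is the branch where the log above surfaces the
                # InvalidArgument / fileType fault as an exception.
                raise RuntimeError(info.error_msg or 'task failed')
            time.sleep(poll_interval)

    # Usage with a canned state sequence, mimicking task-3475234 above:
    _states = iter([TaskInfo('running', 0), TaskInfo('success', 100, 'ok')])
    print(wait_for_task(lambda: next(_states), poll_interval=0.01))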
[ 2457.562419] env[69227]: DEBUG nova.compute.claims [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2457.562601] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2457.562835] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2457.566614] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2457.615304] env[69227]: DEBUG oslo_vmware.rw_handles [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2457.676078] env[69227]: DEBUG oslo_vmware.rw_handles [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2457.676300] env[69227]: DEBUG oslo_vmware.rw_handles [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2458.131352] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c030657-7011-4d5f-b6e6-d84b06fcca99 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.138534] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac06597-3d88-44c0-9742-6930eb840e36 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.167647] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc786dc-a682-4b10-ac2f-c80ff80efde3 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.174060] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81209ee0-c6c5-4a1e-86d7-8cfa1b6cd436 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2458.187487] env[69227]: DEBUG nova.compute.provider_tree [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2458.690770] env[69227]: DEBUG nova.scheduler.client.report [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2459.195679] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.633s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2459.196267] env[69227]: ERROR nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2459.196267] env[69227]: Faults: ['InvalidArgument'] [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Traceback (most recent call last): [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2459.196267] env[69227]: 
ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self.driver.spawn(context, instance, image_meta, [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self._fetch_image_if_missing(context, vi) [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] image_cache(vi, tmp_image_ds_loc) [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] vm_util.copy_virtual_disk( [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] session._wait_for_task(vmdk_copy_task) [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] return self.wait_for_task(task_ref) [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] return evt.wait() [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] result = hub.switch() [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] return self.greenlet.switch() [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] self.f(*self.args, **self.kw) [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] raise exceptions.translate_fault(task_info.error) [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Faults: ['InvalidArgument'] [ 2459.196267] env[69227]: ERROR nova.compute.manager [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] [ 2459.197104] env[69227]: DEBUG nova.compute.utils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2459.198674] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Build of instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae was re-scheduled: A specified parameter was not correct: fileType [ 2459.198674] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2459.199051] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2459.199223] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2459.199391] env[69227]: DEBUG nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2459.199550] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2459.956203] env[69227]: DEBUG nova.network.neutron [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2460.459535] env[69227]: INFO nova.compute.manager [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Took 1.26 seconds to deallocate network for instance. [ 2461.492141] env[69227]: INFO nova.scheduler.client.report [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Deleted allocations for instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae [ 2462.000132] env[69227]: DEBUG oslo_concurrency.lockutils [None req-05cf258c-12c6-476c-bcd0-dad989fa9e15 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.888s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2462.000409] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.466s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2462.000645] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Acquiring lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2462.000857] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2462.001038] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2462.004034] env[69227]: INFO nova.compute.manager [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Terminating instance [ 2462.005834] env[69227]: DEBUG nova.compute.manager [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2462.006043] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2462.006324] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d0267c2-42e9-4556-9c89-b400fc31b33d {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.015056] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f833b099-cb2e-4e8e-bb37-e8b76743e57c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2462.039791] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f0394b5e-1437-4e73-9177-0d3f9b1a16ae could not be found. [ 2462.039982] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2462.040171] env[69227]: INFO nova.compute.manager [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2462.040406] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2462.040614] env[69227]: DEBUG nova.compute.manager [-] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2462.040709] env[69227]: DEBUG nova.network.neutron [-] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2462.557800] env[69227]: DEBUG nova.network.neutron [-] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2463.060984] env[69227]: INFO nova.compute.manager [-] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] Took 1.02 seconds to deallocate network for instance. [ 2464.084949] env[69227]: DEBUG oslo_concurrency.lockutils [None req-c4aea787-e6c5-4aff-a294-b4c810b24bd6 tempest-ServerPasswordTestJSON-1801642119 tempest-ServerPasswordTestJSON-1801642119-project-member] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.084s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2464.085879] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 71.471s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2464.086083] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: f0394b5e-1437-4e73-9177-0d3f9b1a16ae] During sync_power_state the instance has a pending task (deleting). Skip. 
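The "Acquiring lock ... acquired ... waited ... released ... held" lines that bracket this record (and the "compute_resources" audits earlier) are emitted by oslo.concurrency's lockutils wrapper around Nova's critical sections. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the lock name reuse and the function body are illustrative, not Nova's code:

    # Named-lock pattern behind the waited/held DEBUG lines in this log.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_resources():
        # Runs with the named in-process lock held; concurrent callers block,
        # and lockutils logs the waited/held durations at DEBUG as seen above.
        pass

    # Equivalent context-manager form:
    with lockutils.lock('compute_resources'):
        pass

    update_resources()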
[ 2464.086266] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "f0394b5e-1437-4e73-9177-0d3f9b1a16ae" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2491.427401] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2495.427350] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2498.428581] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2498.428581] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2498.428581] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69227) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10562}} [ 2500.427626] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2501.423392] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2502.427323] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2504.427086] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2504.427474] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Starting heal instance info cache {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9943}} [ 2504.427474] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Rebuilding the list of instances to heal {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9947}} [ 2504.757174] env[69227]: WARNING 
oslo_vmware.rw_handles [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles response.begin() [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2504.757174] env[69227]: ERROR oslo_vmware.rw_handles [ 2504.757962] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Downloaded image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2504.759766] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Caching image {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2504.760018] env[69227]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Copying Virtual Disk [datastore2] vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk to [datastore2] vmware_temp/042ac53f-6b95-4dd3-b800-a6cad15f6d95/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk {{(pid=69227) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2504.760302] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3e02fe2-b213-40db-a5c5-97743f783fa9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2504.768017] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2504.768017] env[69227]: value = "task-3475235" [ 2504.768017] env[69227]: _type = "Task" [ 2504.768017] env[69227]: } to complete. 
{{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2504.775567] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': task-3475235, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2504.931995] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2504.932202] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2504.932302] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2504.932425] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 247d620f-a6ed-4b40-9cc5-269c6f99c487] Skipping network cache update for instance because it is Building. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9956}} [ 2504.932547] env[69227]: DEBUG nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Didn't find any instances for network info cache update. {{(pid=69227) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10029}} [ 2504.932759] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager.update_available_resource {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2505.278372] env[69227]: DEBUG oslo_vmware.exceptions [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Fault InvalidArgument not matched. 
{{(pid=69227) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2505.278623] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2505.279184] env[69227]: ERROR nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2505.279184] env[69227]: Faults: ['InvalidArgument'] [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Traceback (most recent call last): [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] yield resources [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self.driver.spawn(context, instance, image_meta, [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self._fetch_image_if_missing(context, vi) [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] image_cache(vi, tmp_image_ds_loc) [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] vm_util.copy_virtual_disk( [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] session._wait_for_task(vmdk_copy_task) [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in 
_wait_for_task [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] return self.wait_for_task(task_ref) [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] return evt.wait() [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] result = hub.switch() [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] return self.greenlet.switch() [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self.f(*self.args, **self.kw) [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] raise exceptions.translate_fault(task_info.error) [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Faults: ['InvalidArgument'] [ 2505.279184] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] [ 2505.280140] env[69227]: INFO nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Terminating instance [ 2505.280955] env[69227]: DEBUG oslo_concurrency.lockutils [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Acquired lock "[datastore2] devstack-image-cache_base/78c61090-3613-43e2-b8eb-045dfd47af0c/78c61090-3613-43e2-b8eb-045dfd47af0c.vmdk" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2505.281934] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2505.281934] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-882ac51a-0a77-418a-91ff-570ab02e9188 {{(pid=69227) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.283659] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Start destroying the instance on the hypervisor. {{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2505.283856] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2505.284568] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dbbe61d-fcc4-4328-aeda-7840d513ada9 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.291115] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Unregistering the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2505.291321] env[69227]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c01eed15-e97d-4404-907e-5b838ce34e14 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.293324] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2505.293501] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69227) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2505.294409] env[69227]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0c858e1-8b15-42ac-b760-783b5822931c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.299811] env[69227]: DEBUG oslo_vmware.api [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Waiting for the task: (returnval){ [ 2505.299811] env[69227]: value = "session[52627fcb-53b9-0a62-ea9b-7a661261c96c]528ebca1-8525-ec00-dfb9-9399e25c2195" [ 2505.299811] env[69227]: _type = "Task" [ 2505.299811] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.306525] env[69227]: DEBUG oslo_vmware.api [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Task: {'id': session[52627fcb-53b9-0a62-ea9b-7a661261c96c]528ebca1-8525-ec00-dfb9-9399e25c2195, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2505.364069] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Unregistered the VM {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2505.364250] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Deleting contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2505.364429] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Deleting the datastore file [datastore2] 84d5494a-c08b-45be-a35a-860e64fdf76f {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2505.364726] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-318a06d6-2670-4e4c-968b-ad7f3ce48149 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.371357] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for the task: (returnval){ [ 2505.371357] env[69227]: value = "task-3475237" [ 2505.371357] env[69227]: _type = "Task" [ 2505.371357] env[69227]: } to complete. {{(pid=69227) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2505.379026] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': task-3475237, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2505.435179] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2505.435526] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2505.435624] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2505.435762] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69227) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2505.436694] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a92f4d6-c02e-48b8-b8c9-6b962079e4fa {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.443909] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35f39e8-31fa-44c0-af6f-44dc9145832a {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.457228] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d595b9a9-26f2-4480-beba-ba5ca57188f6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.463262] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba519a0-4925-4925-97a9-652a6b6adcb0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.491228] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180953MB free_disk=93GB free_vcpus=48 pci_devices=None {{(pid=69227) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2505.491382] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2505.491569] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69227) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2505.810529] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Preparing fetch location {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2505.810790] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating directory with path [datastore2] vmware_temp/920e4c65-8fe9-4dd6-953b-981473ec72b1/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2505.811031] env[69227]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d1fa1c2c-fedd-401c-a1e3-910f13535584 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.822016] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Created directory with path [datastore2] vmware_temp/920e4c65-8fe9-4dd6-953b-981473ec72b1/78c61090-3613-43e2-b8eb-045dfd47af0c {{(pid=69227) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2505.822217] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Fetch image to [datastore2] vmware_temp/920e4c65-8fe9-4dd6-953b-981473ec72b1/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk {{(pid=69227) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2505.822388] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to [datastore2] vmware_temp/920e4c65-8fe9-4dd6-953b-981473ec72b1/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk on the data store datastore2 {{(pid=69227) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2505.823096] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ce541c-717a-4a59-8463-bc62668fe308 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.829221] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44688e12-9f54-43fe-9791-92ccbe82e577 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.837680] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6541d11-508f-4260-8bf3-614e48cbf6a0 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.870076] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-5d9b5774-b9f1-4762-931c-884385408741 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.882569] env[69227]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e4f828c3-838f-48ff-8767-06497d8df0b6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2505.884519] env[69227]: DEBUG oslo_vmware.api [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Task: {'id': task-3475237, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064488} completed successfully. {{(pid=69227) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2505.884772] env[69227]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Deleted the datastore file {{(pid=69227) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2505.885034] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Deleted contents of the VM from datastore datastore2 {{(pid=69227) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2505.885129] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2505.885299] env[69227]: INFO nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Took 0.60 seconds to destroy the instance on the hypervisor. 
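Editor's note: the spawn attempt logged above fails in two distinct ways. First, the image write handle is closed by the far end before any HTTP status line arrives (the RemoteDisconnected warning from oslo_vmware.rw_handles); second, the follow-up CopyVirtualDisk_Task is rejected with the InvalidArgument / "A specified parameter was not correct: fileType" fault, which get_fault_class could not map to a specific class ("Fault InvalidArgument not matched"), so the generic VimFaultException is raised. The sketch below only illustrates those two signatures; the hostname and path are placeholders, and it assumes VimFaultException exposes the fault names via a fault_list attribute, as the traceback above suggests.

```python
# Illustrative sketch of the two failure signatures in the spawn attempt above.
# Assumptions: the host/path arguments are placeholders, and
# oslo_vmware.exceptions.VimFaultException is assumed to carry the fault names
# in a fault_list attribute (['InvalidArgument'] in this log).
import http.client

from oslo_vmware import exceptions as vexc


def read_image_response(host: str, path: str):
    """RemoteDisconnected: the peer accepts the connection but closes it
    before sending an HTTP status line, which rw_handles logs as a WARNING
    when it closes the image write handle."""
    conn = http.client.HTTPSConnection(host, 443, timeout=10)
    try:
        conn.request("GET", path)
        return conn.getresponse().read()
    except http.client.RemoteDisconnected:
        return None
    finally:
        conn.close()


def is_invalid_argument(error: Exception) -> bool:
    """Classify the fault that aborts the CopyVirtualDisk_Task above."""
    faults = getattr(error, "fault_list", None) or []  # assumed attribute
    return isinstance(error, vexc.VimFaultException) and "InvalidArgument" in faults
```

In the log, it is this fault that drives the claim abort and, further down, the re-schedule and eventual termination of instance 84d5494a-c08b-45be-a35a-860e64fdf76f.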
[ 2505.887484] env[69227]: DEBUG nova.compute.claims [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Aborting claim: {{(pid=69227) abort /opt/stack/nova/nova/compute/claims.py:84}} [ 2505.887649] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2505.904147] env[69227]: DEBUG nova.virt.vmwareapi.images [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] [instance: 0b31dc0b-6a70-41aa-adbe-d989a002adca] Downloading image file data 78c61090-3613-43e2-b8eb-045dfd47af0c to the data store datastore2 {{(pid=69227) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2505.969090] env[69227]: DEBUG oslo_vmware.rw_handles [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/920e4c65-8fe9-4dd6-953b-981473ec72b1/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2506.028903] env[69227]: DEBUG oslo_vmware.rw_handles [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Completed reading data from the image iterator. {{(pid=69227) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2506.029079] env[69227]: DEBUG oslo_vmware.rw_handles [None req-f49e20a6-cd89-46fc-bf35-c14f74024da9 tempest-AttachVolumeShelveTestJSON-152554792 tempest-AttachVolumeShelveTestJSON-152554792-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/920e4c65-8fe9-4dd6-953b-981473ec72b1/78c61090-3613-43e2-b8eb-045dfd47af0c/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69227) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2506.551020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 84d5494a-c08b-45be-a35a-860e64fdf76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2506.551020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 0b31dc0b-6a70-41aa-adbe-d989a002adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2506.551020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 2d191d85-97d5-4b5e-9e1b-1fdd68754e4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2506.551020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Instance 247d620f-a6ed-4b40-9cc5-269c6f99c487 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69227) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2506.551020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2506.551020] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=69227) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2506.606060] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0b5f18-708a-400e-b38b-db15eb416ed6 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.611990] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f44dda1-7d9e-42f3-87b6-343f930aaf71 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.642200] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8852b9e7-9330-4092-826f-569fe71e905c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.649048] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbd3054-ca38-4810-bf78-808500055529 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2506.661936] env[69227]: DEBUG nova.compute.provider_tree [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2507.167059] env[69227]: DEBUG nova.scheduler.client.report [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2507.674025] env[69227]: DEBUG nova.compute.resource_tracker [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69227) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2507.674025] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.180s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2507.674025] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 1.784s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2508.233057] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206c79b8-0d22-4707-a877-03a6ceab0fdb {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.239201] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6afb33b5-6713-4038-b4a4-9f3b56b2ff6f {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.268587] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83170997-5153-4bc4-9444-a28cb1dc1844 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.276026] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1e5b53-3954-402f-aa91-aad4eecd735c {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2508.290480] env[69227]: DEBUG nova.compute.provider_tree [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed in ProviderTree for provider: 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b {{(pid=69227) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2508.796022] env[69227]: DEBUG nova.scheduler.client.report [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Inventory has not changed for provider 30ebb745-06e2-4c8d-a7ac-d4905bfa3a7b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 93, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69227) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2509.301733] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 
tempest-ServersTestJSON-1746052672-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.628s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2509.301733] env[69227]: ERROR nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2509.301733] env[69227]: Faults: ['InvalidArgument'] [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Traceback (most recent call last): [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self.driver.spawn(context, instance, image_meta, [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self._fetch_image_if_missing(context, vi) [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] image_cache(vi, tmp_image_ds_loc) [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] vm_util.copy_virtual_disk( [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] session._wait_for_task(vmdk_copy_task) [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] return self.wait_for_task(task_ref) [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] return evt.wait() [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 
84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] result = hub.switch() [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] return self.greenlet.switch() [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] self.f(*self.args, **self.kw) [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] raise exceptions.translate_fault(task_info.error) [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Faults: ['InvalidArgument'] [ 2509.301733] env[69227]: ERROR nova.compute.manager [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] [ 2509.301733] env[69227]: DEBUG nova.compute.utils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] VimFaultException {{(pid=69227) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2509.303520] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Build of instance 84d5494a-c08b-45be-a35a-860e64fdf76f was re-scheduled: A specified parameter was not correct: fileType [ 2509.303520] env[69227]: Faults: ['InvalidArgument'] {{(pid=69227) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 2509.304144] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Unplugging VIFs for instance {{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 2509.304472] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=69227) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 2509.304795] env[69227]: DEBUG nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2509.306832] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2510.114283] env[69227]: DEBUG nova.network.neutron [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2510.616734] env[69227]: INFO nova.compute.manager [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Took 1.31 seconds to deallocate network for instance. [ 2511.650411] env[69227]: INFO nova.scheduler.client.report [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Deleted allocations for instance 84d5494a-c08b-45be-a35a-860e64fdf76f [ 2512.162060] env[69227]: DEBUG oslo_concurrency.lockutils [None req-cc7e179e-b1b0-4788-a422-b1b8e45948d8 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 681.218s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2512.162060] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 484.816s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2512.162355] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "84d5494a-c08b-45be-a35a-860e64fdf76f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 2512.162518] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
2512.162682] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2512.165761] env[69227]: INFO nova.compute.manager [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Terminating instance [ 2512.167550] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquiring lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2512.167702] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Acquired lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 2512.167877] env[69227]: DEBUG nova.network.neutron [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Building network info cache for instance {{(pid=69227) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2512.702682] env[69227]: DEBUG nova.network.neutron [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2512.774931] env[69227]: DEBUG nova.network.neutron [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2513.277071] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Releasing lock "refresh_cache-84d5494a-c08b-45be-a35a-860e64fdf76f" {{(pid=69227) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 2513.277294] env[69227]: DEBUG nova.compute.manager [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Start destroying the instance on the hypervisor. 
{{(pid=69227) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 2513.277451] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Destroying instance {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2513.277756] env[69227]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4cb975e-a9f4-4a96-8537-e2f497017883 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.286716] env[69227]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e61de91-4ced-402c-b950-403ad3fd8a94 {{(pid=69227) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2513.311534] env[69227]: WARNING nova.virt.vmwareapi.vmops [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 84d5494a-c08b-45be-a35a-860e64fdf76f could not be found. [ 2513.311709] env[69227]: DEBUG nova.virt.vmwareapi.vmops [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance destroyed {{(pid=69227) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2513.311883] env[69227]: INFO nova.compute.manager [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 2513.312125] env[69227]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69227) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 2513.312329] env[69227]: DEBUG nova.compute.manager [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Deallocating network for instance {{(pid=69227) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 2513.312423] env[69227]: DEBUG nova.network.neutron [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] deallocate_for_instance() {{(pid=69227) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2513.328799] env[69227]: DEBUG nova.network.neutron [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Instance cache missing network info. {{(pid=69227) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2513.831165] env[69227]: DEBUG nova.network.neutron [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Updating instance_info_cache with network_info: [] {{(pid=69227) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2514.333661] env[69227]: INFO nova.compute.manager [-] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] Took 1.02 seconds to deallocate network for instance. 
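Editor's note: the second terminate above succeeds even though the VM is already gone from vCenter ("Instance does not exist on backend: nova.exception.InstanceNotFound"); the driver treats the missing VM as a completed destroy so that network deallocation and cleanup can proceed. A minimal sketch of that idempotent-destroy pattern follows; VmNotFound and unregister_vm are hypothetical placeholders, not Nova or oslo.vmware APIs.

```python
class VmNotFound(Exception):
    """Hypothetical stand-in for nova.exception.InstanceNotFound."""


def destroy_vm(unregister_vm, instance_uuid: str) -> None:
    """Treat a VM that is already gone as successfully destroyed.

    `unregister_vm` is a hypothetical callable standing in for the driver's
    unregister/delete calls; it is expected to raise VmNotFound when the
    backend has no such VM.
    """
    try:
        unregister_vm(instance_uuid)
    except VmNotFound:
        # Mirrors the "Instance does not exist on backend ... Instance
        # destroyed" sequence above: nothing left to clean up on the
        # hypervisor, so terminate continues with network deallocation.
        pass
```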
[ 2515.362046] env[69227]: DEBUG oslo_concurrency.lockutils [None req-021813c5-c18b-42d5-a997-69682fdccac3 tempest-ServersTestJSON-1746052672 tempest-ServersTestJSON-1746052672-project-member] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.198s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2515.362046] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 122.747s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 2515.362046] env[69227]: INFO nova.compute.manager [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] [instance: 84d5494a-c08b-45be-a35a-860e64fdf76f] During sync_power_state the instance has a pending task (spawning). Skip. [ 2515.362046] env[69227]: DEBUG oslo_concurrency.lockutils [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Lock "84d5494a-c08b-45be-a35a-860e64fdf76f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69227) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 2521.670587] env[69227]: DEBUG oslo_service.periodic_task [None req-1880c1b0-d1ca-4669-9f16-cdba01d4c577 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69227) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
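Editor's note: the 'Lock "<uuid>" acquired by ... :: waited' and '"released" ... :: held' lines above appear to come from oslo.concurrency's synchronized wrapper (the `inner` frames in lockutils.py), which serializes each power-state sync per instance UUID. A minimal sketch of that pattern, assuming only that oslo.concurrency is installed; the nested-function shape mirrors the `..query_driver_power_state_and_sync` naming in the log, and the body is a placeholder.

```python
from oslo_concurrency import lockutils


def query_driver_power_state_and_sync(instance_uuid: str) -> None:
    """Serialize per-instance work the way _sync_power_states does above."""

    @lockutils.synchronized(instance_uuid)
    def _sync() -> None:
        # Reconcile the driver-reported power state with the database here.
        # While another thread holds the same UUID lock, this call blocks,
        # which is what the "waited N.NNNs" figures in the log measure.
        pass

    _sync()
```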
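Editor's note: the recurring "Running periodic task ComputeManager._..." lines, including the final _sync_scheduler_instance_info entry, are oslo.service's periodic-task machinery invoking decorated manager methods via run_periodic_tasks. A minimal declaration sketch, assuming oslo.service and oslo.config are installed; DemoManager and its task body are illustrative only, not Nova code.

```python
from oslo_config import cfg
from oslo_service import periodic_task

CONF = cfg.CONF


class DemoManager(periodic_task.PeriodicTasks):
    """Illustrative manager; not Nova's ComputeManager."""

    def __init__(self) -> None:
        super().__init__(CONF)

    @periodic_task.periodic_task(spacing=10)
    def _poll_something(self, context) -> None:
        # A real task would do work such as polling unconfirmed resizes;
        # here it only exists to be picked up by run_periodic_tasks().
        pass


if __name__ == "__main__":
    # In a real service a timer loop calls this repeatedly; each call runs
    # whichever decorated tasks are due, producing the "Running periodic
    # task ..." DEBUG lines seen throughout this log.
    DemoManager().run_periodic_tasks(context=None)
```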